Compare commits

...

426 Commits

Author SHA1 Message Date
7a8c061d79 Added CONFIG += c++17 to get correct autocompletions 2020-10-30 18:09:37 +01:00
8d4cde4534 Merge tag 'v5.0.2' into develop
Bump version to 5.0.2
2020-10-30 18:10:54 +02:00
7292024ee6 Merge branch 'release/v5.0.2' 2020-10-30 18:10:54 +02:00
d6df6cbb5d Bump version to 5.0.2 2020-10-30 18:10:47 +02:00
344e94d8a1 Bump version to 5.0.2rc2 2020-10-30 17:51:02 +02:00
5cf73a9165 Remove all hooks when dumping data to JSON and Python 3 is used 2020-10-30 17:50:43 +02:00
96b1a1c79c Fixed an issue with a "wrong" timestamp in device monitor output using "time" filter // Resolve #3712 2020-10-30 14:11:27 +02:00
0bbe7f8c73 Sync docs 2020-10-29 23:48:44 +02:00
e333bb1cca Tests: skip dev-platform without examples 2020-10-29 23:42:15 +02:00
454cd8d784 Bump version to 5.0.2rc1 2020-10-29 23:18:39 +02:00
743a43ae17 Fixed an issue when multiple `pio lib install` commands with the same local library result in duplicates in `lib_deps` // Resolve #3715 2020-10-29 23:17:47 +02:00
5a1b0e19b2 Initialize a new project or update an existing one, passing the working environment name and its options // Resolve #3686 2020-10-29 22:59:48 +02:00
da6cde5cbd Sync docs 2020-10-29 18:09:08 +02:00
5ea864da39 Add py39 env 2020-10-29 18:08:58 +02:00
175448deda Fix tests on PY2 2020-10-29 14:37:50 +02:00
16f90dd821 Ignore possible empty defines when exporting IDE data // Resolve #3690 2020-10-29 12:22:26 +02:00
9efac669e6 Bump version to 5.0.2b5 2020-10-28 22:53:29 +02:00
adf9ba29df Fixed an issue when "pio package publish" command removes original archive after submitting to the registry // Resolve #3716 2020-10-28 22:52:48 +02:00
cacddb9abb Support package packing on Python 3+ only 2020-10-28 22:33:24 +02:00
edbe213410 Sync docs 2020-10-28 22:32:48 +02:00
891f78be37 Use "ensure_python3" util 2020-10-28 22:32:27 +02:00
175be346a8 Extend package filters 2020-10-28 20:57:26 +02:00
9ae981614f Add pack/sdist target 2020-10-28 20:56:53 +02:00
16f5f3ef46 Do not pack binary files and docs to the package source archive 2020-10-28 14:18:09 +02:00
2cd19b0273 Bump version to 5.0.2b4 2020-10-27 23:00:33 +02:00
e158e54a26 Fix issue with data decoding when calling PIO Core via PIO Home 2020-10-27 22:57:51 +02:00
63a6fe9133 Improved "core.call" RPC for PlatformIO Home // Resolve #3671 2020-10-27 21:07:02 +02:00
779eaee310 Bump version to 5.0.2b3 2020-10-26 22:25:47 +02:00
0ecfe8105f Docs: Unify CLI commands to use "pio" short version command 2020-10-26 22:24:05 +02:00
b8cc867ba4 Allow dev-platform to provide extra debug configuration using the BasePlatform::configure_debug_options API 2020-10-26 18:24:46 +02:00
7230556d1b Move extra IDE data to "extra" section 2020-10-26 18:23:28 +02:00
afd79f4655 Improve initiating manifest parser from a package archive 2020-10-22 19:08:20 +03:00
5d87fb8757 Add "Articles" section to Zephyr RTOS 2020-10-22 19:07:44 +03:00
23e9596506 Automatically build PlatformIO Core extra Python dependencies on a host machine if they are missing in the registry // Resolve #3700 2020-10-20 21:06:53 +03:00
428f46fafe Typo test 2020-10-16 17:05:19 +03:00
ee847e03a6 Fix an issue with "'NoneType' object has no attribute 'status_code'" 2020-10-16 14:23:10 +03:00
a870981266 Docs: Fix custom "platform_packages" for ESP8266/32 2020-10-14 23:21:59 +03:00
411bf1107d Disable "lattice_ice40" examples for macOS 2020-10-14 22:33:04 +03:00
5b74c8a942 Minor fixes 2020-10-14 19:53:30 +03:00
a24bab0a27 Fix badge 2020-10-14 17:55:45 +03:00
1cb7764b0e Highlight PlatformIO Labs Technology 2020-10-14 17:53:41 +03:00
d835f52a18 Sync docs 2020-10-10 20:48:56 +03:00
9c20ab81cb fix quoting of defines in ccls template (#3692) 2020-10-02 13:28:02 +03:00
14de3e79c5 Sync docs 2020-09-26 22:52:43 +03:00
21c12030d5 Bump version to 5.0.2b2 2020-09-19 19:30:40 +03:00
2370e16f1b Fixed an "AssertionError: ensure_dir_exists" when checking library updates from simultaneous subprocesses // Resolve #3677 2020-09-19 19:29:51 +03:00
a384411a28 Bump version to 5.0.2b1 2020-09-17 20:41:10 +03:00
1e0ca8f79c Fixed an issue with GCC linker when "native" dev-platform is used in pair with library dependencies // Resolve #3669 2020-09-17 20:40:11 +03:00
2b5e590819 Docs: Explain how to install custom Python packages // Resolve #3673 2020-09-17 19:21:12 +03:00
bf57b777bf Docs: Update docs for PlatformIO IDE 2.0 for VSCode 2020-09-16 19:33:55 +03:00
f656d19ed5 Docs: Added new section about Arduino STM32L0 core 2020-09-14 22:31:11 +03:00
eb09af06ed Bump version to 5.0.2a2 2020-09-12 23:21:33 +03:00
687c339f20 Fixed a "PermissionError: [WinError 5]" on Windows when external repository is used with lib_deps option // Resolve #3664 2020-09-12 23:20:46 +03:00
7bc170a53e Fixed an issue with "KeyError: 'versions'" when dependency does not exist in the registry // Resolve #3666 2020-09-11 21:16:18 +03:00
65297c24d4 Merge branch 'release/v5.0.1' 2020-09-10 17:46:56 +03:00
ea21f3fba0 Merge tag 'v5.0.1' into develop
Bump version to 5.0.1
2020-09-10 17:46:56 +03:00
b515a004d3 Bump version to 5.0.1 2020-09-10 17:46:49 +03:00
7d3fc1ec1a Catch exception if folder already exists 2020-09-09 18:38:55 +03:00
6987d6c1c6 Fixed an issue when it was not possible to update or remove an external dev-platform using PlatformIO Home // Resolve #3663 2020-09-09 17:53:04 +03:00
de2b5ea905 Bump version to 5.0.1b1 2020-09-09 16:27:47 +03:00
f946a0bc08 Reformat code with black==20.8b1 2020-09-09 16:27:36 +03:00
4f47ca5742 Fixed an issue with "Invalid simple block (semantic_version)" from library dependency that refs to an external source (repository, ZIP/Tar archives) // Resolve #3658 2020-09-09 16:13:39 +03:00
54b51fc2fd Merge branch 'develop' of https://github.com/platformio/platformio-core into develop
# Conflicts:
#	HISTORY.rst
2020-09-09 14:38:29 +03:00
1f284e853d Fixed an issue when the package manager tries to install a built-in library from the registry // Resolve #3662 2020-09-09 14:36:01 +03:00
2a30ad0fdf Allow in-progress language standards in IDE templates // Resolve #3653
Note: VS Code only supports finalized names
2020-09-09 13:56:00 +03:00
c454ae336d Added support for "owner" requirement when declaring dependencies using `library.json` 2020-09-09 13:10:42 +03:00
cd59c829e0 Fixed an issue when pio package unpublish command crashes // Resolve #3660 2020-09-09 12:17:09 +03:00
429f416b38 Generate current date for a custom contrib-pysite package 2020-09-07 16:38:20 +03:00
0a881d582d Docs: Add info on how to list published packages 2020-09-07 16:37:37 +03:00
65b1029216 Host SPDX licenses on Bintray, Github is blocked in multiple countries 2020-09-07 13:16:08 +03:00
c7758fd30e Docs: Minor fixes 2020-09-06 21:06:16 +03:00
46f300d62f Docs: Add "Publishing" section to the instruction on how to create own dev-platform 2020-09-06 21:00:45 +03:00
4234dfb6f9 Fixed an issue with "ImportError: cannot import name '_get_backend' from 'cryptography.hazmat.backends'" when using Remote Development // Resolve #3652 2020-09-06 18:37:27 +03:00
9695720343 Bump version to 5.0.1a1 2020-09-04 22:48:21 +03:00
1f28056459 Fixed an issue when using a custom git/ssh package with platform_packages // Resolve #3624 2020-09-04 22:47:49 +03:00
7dacceef04 Exclude tests from python package (#3650) 2020-09-04 18:55:30 +03:00
39883e8d68 Docs: Document "--without-testing" option for pio test command 2020-09-04 18:39:52 +03:00
949ef2c48a Merge tag 'v5.0.0' into develop
Bump version to 5.0.0
2020-09-03 14:43:11 +03:00
ada3f8b270 Merge branch 'release/v5.0.0' 2020-09-03 14:43:10 +03:00
cf4b835b0c Bump version to 5.0.0 2020-09-03 14:42:59 +03:00
fec4569ada Docs: Update docs with new owner-based dependency form 2020-09-03 14:37:24 +03:00
083edc4c76 Refactor to os.path 2020-09-02 20:52:11 +03:00
fe4112a2a3 Follow SemVer-compliant version constraints when checking library updates // Resolve #1281 2020-09-02 20:36:56 +03:00
c8ea64edab Fix link to FAQ sections (#3642)
* Fix link to FAQ sections

Consistently use the same host and URL, and fix one unmatched anchor.

* Update HISTORY.rst

Co-authored-by: Ivan Kravets <me@ikravets.com>
2020-09-02 19:13:20 +03:00
6e5198f373 Minor improvements 2020-09-02 18:49:00 +03:00
44c2b65372 Show ignored project environments only in the verbose mode // Resolve #3641 2020-09-02 17:31:32 +03:00
5cc21511ad Show owner name for packages 2020-09-02 16:07:16 +03:00
2edd7ae649 Update PVS-Studio to the latest v7.09 2020-08-31 15:40:25 +03:00
7a49a74135 Bump version to 5.0.0b3 2020-08-28 21:55:55 +03:00
be487019f5 Fix broken handling of multi-configuration projects // Resolve #3615 2020-08-28 21:54:47 +03:00
5dee0a31e6 Do not test for package owner if resource is external 2020-08-28 21:40:17 +03:00
9f2c134e44 Do not detach a new package even if it comes from external source 2020-08-28 21:24:48 +03:00
cdbb837948 Minor fixes 2020-08-28 18:45:52 +03:00
80c1774a19 Docs: PlatformIO Core 5.0: new commands, migration guide, other improvements 2020-08-28 14:08:26 +03:00
1aaa9b6707 Update changelog with static analysis section 2020-08-26 17:44:01 +03:00
4a7f578649 Sync docs and history 2020-08-26 15:40:24 +03:00
d59416431d Parse npm-like "repository" data from a package manifest // Resolve #3637 2020-08-26 15:40:03 +03:00
8625fdc571 Minor improvements 2020-08-26 14:51:53 +03:00
3c91e3c1e1 Move build dir to the disk root (should fix issue with long paths for Zephyr RTOS on Windows) 2020-08-26 14:51:01 +03:00
1560fb724c Bump version to 5.0.0b2 2020-08-26 06:40:46 +03:00
0db39ccfbd Automatically accept PIO Core 4.0 compatible dev-platforms 2020-08-26 06:40:22 +03:00
5086b96ede Bump version to 5.0.0b1 2020-08-25 22:22:35 +03:00
210cd76042 Rename "idedata" sub-command to "data" 2020-08-25 22:01:22 +03:00
f77978a295 Apply formatting 2020-08-25 22:01:08 +03:00
3e72f098fe Updates for PIO Check (#3640)
* Update check tools to the latest versions

* Use language standard when exporting defines to check tools

* Buffer Cppcheck output to detect multiline messages

* Add new test for PIO Check

* Pass include paths to Clang-Tidy as individual compiler arguments

Clang-Tidy doesn't support response files, which are normally used to avoid
exceeding command length limitations on Windows (see the sketch after this entry)

* Simplify tests for PIO Check

* Update history

* Sync changelog
2020-08-25 21:19:21 +03:00
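The Clang-Tidy note above is the kind of change a short, hedged sketch can make concrete: the tool takes its compiler arguments one by one after the "--" separator instead of via a response file. The file names and include paths below are placeholders, not PlatformIO's actual values.

    includes = ["include", "src", "lib/Foo/src"]
    # Clang-Tidy: pass each include path as an individual compiler argument
    clang_tidy_cmd = ["clang-tidy", "src/main.cpp", "--"] + ["-I" + p for p in includes]
    # Other tools could shorten a long command line with a response file,
    # but Clang-Tidy cannot read one, so the flags above stay inline.
    gcc_cmd = ["gcc", "@compile_flags.rsp", "-c", "src/main.cpp"]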
b9fe493336 Sync docs 2020-08-25 19:18:26 +03:00
79bfac29ba Update history and sync docs 2020-08-25 18:57:20 +03:00
2ea80d91f8 Minor fixes 2020-08-25 15:55:17 +03:00
fa90251714 Fixed an issue when Unit Testing engine fails with a custom project configuration file // Resolve #3583 2020-08-25 14:35:01 +03:00
ff19109787 Fix test 2020-08-25 14:34:03 +03:00
091ba4346d Bump version to 4.4.0b6 2020-08-24 23:11:43 +03:00
e43176e33a Typo fix 2020-08-24 23:11:24 +03:00
655e2856d1 Bump version to 4.4.0b5 2020-08-24 23:05:01 +03:00
c6a37ef880 Get real path of just installed core-package 2020-08-24 23:04:17 +03:00
6af2bad123 Make PIO Core 4.0 automatically compatible with dev-platforms for PIO Core 2.0 & 3.0 // Resolve #3638 2020-08-24 22:56:31 +03:00
3e7e9e2b3d Remove unused data using a new `pio system prune` command // Resolve #3522 2020-08-24 15:22:05 +03:00
13db51a556 Install/Uninstall dependencies only for library-type packages // Resolve #3637 2020-08-24 15:10:38 +03:00
d6d95e05e8 Rename "fs.format_filesize" to "fs.humanize_file_size" 2020-08-24 15:09:37 +03:00
b44bc80bd1 PyLint fix for PY2 2020-08-23 21:41:53 +03:00
f39c9fb597 Bump version to 4.4.0b4 2020-08-23 21:07:40 +03:00
24f85a337f Fix "AttributeError: module 'platformio.exception' has no attribute 'InternetIsOffline'" 2020-08-23 21:07:14 +03:00
a069bae1fb Fix a bug with package updating when version is not in SemVer format // Resolve #3635 2020-08-23 15:26:58 +03:00
1c8aca2f6a Check ALL possible versions for the first matched package 2020-08-23 15:25:03 +03:00
620241e067 Move package "version" related things to "platformio.package.version" module 2020-08-23 15:24:31 +03:00
da179cb33f Enhance configuration variables 2020-08-23 14:29:31 +03:00
8ea10a18d3 Bump version to 4.4.0b3 2020-08-23 13:22:38 +03:00
e2bb81bae4 Restore legacy util.cd API 2020-08-23 13:22:11 +03:00
dcf91c49ac Remove debug code 2020-08-22 22:56:26 +03:00
c2caf8b839 Bump version to 4.4.0b2 2020-08-22 22:53:41 +03:00
95151062f5 Implement mirroring for HTTP client 2020-08-22 22:52:29 +03:00
7e4bfb1959 Move CacheContent API to "cache.py" module 2020-08-22 20:05:14 +03:00
abae9c7e77 Cache base registry requests 2020-08-22 17:52:12 +03:00
102aa5f22b Port legacy API requests to the new registry client 2020-08-22 17:49:29 +03:00
d92c1d3442 Refactor HTTP related operations 2020-08-22 17:48:49 +03:00
aa186382a8 Upgraded to SCons 4.0 2020-08-22 14:22:37 +03:00
70366d34b9 Sync docs 2020-08-22 13:57:18 +03:00
49b70f44ca Ignore legacy tmp pkg folders 2020-08-22 13:56:57 +03:00
f79fb4190e Sync docs 2020-08-21 14:25:59 +03:00
d980194600 Bump version to 4.4.0b1 2020-08-17 15:34:02 +03:00
fb6e1fd33c PyLint fixes 2020-08-17 15:33:08 +03:00
6f7fc638c7 Fix PyLint errors in tests 2020-08-17 12:56:57 +03:00
2459e85c1d Fix a bug with the custom platform packages // Resolve #3628 2020-08-17 12:13:25 +03:00
74e27a2edc Enable "cyclic reference" for GCC linker only for the embedded dev-platforms // Resolve #3570 2020-08-16 20:26:59 +03:00
808852f4cc Set default timeout for http requests // Resolve #3623 2020-08-16 20:21:30 +03:00
67e6d177b4 Minor fixes for dev-platform factory 2020-08-16 18:48:05 +03:00
04694b4126 Switch legacy platform manager to the new one 2020-08-15 23:11:01 +03:00
bb6fb3fdf8 Fix bug with parsing detached packages 2020-08-15 15:24:35 +03:00
4ec64f8980 Fix a test for examples 2020-08-14 17:00:18 +03:00
332874cd4b Fix relative import of platform module on Py27 2020-08-14 16:48:12 +03:00
276ca61cde Refactor dev-platform API 2020-08-14 16:39:15 +03:00
5f3ad70190 Rename meta.PackageSourceItem to PackageItem 2020-08-14 16:38:46 +03:00
ff8ec43a28 Ensure tool-type package is compatible with a host system 2020-08-13 21:46:46 +03:00
ecc369c2f8 Minor fixes 2020-08-13 20:19:27 +03:00
26fdd0a62c Bump version to 4.4.0a8 2020-08-13 18:30:33 +03:00
64ff6a0ff5 Switch legacy core package manager to the new one 2020-08-13 18:30:04 +03:00
fd7dba1d74 Package Manifest: increase package author.name field to 100 chars 2020-08-13 17:50:44 +03:00
38ec517200 Update history 2020-08-12 21:09:42 +03:00
20a74d1654 Merge branch 'feature/pkg-next' into develop 2020-08-12 20:09:18 +03:00
d5451756fd Minor improvements 2020-08-12 20:09:10 +03:00
893ca1b328 Switch library manager to the new package manager 2020-08-12 13:27:05 +03:00
2dd69e21c0 Implement package removing with dependencies 2020-08-01 20:17:07 +03:00
a01b3a2473 Do not raise exception when package is not found (404), return None 2020-08-01 19:58:59 +03:00
6ac538fba4 Remove unused import 2020-08-01 15:49:10 +03:00
41c2d64ef0 Fix "PermissionError: [WinError 32] The process cannot access the file" on Windows 2020-08-01 15:36:28 +03:00
a1970bbfe3 Allow a forced package installation with removing existing package 2020-08-01 14:38:28 +03:00
d329aef876 Initial version of a new package manager 2020-07-31 15:42:26 +03:00
abc0489ac6 Update changelog 2020-07-28 15:59:02 +03:00
2bc47f4e97 PyLint fix 2020-07-28 15:55:25 +03:00
933a09f981 Update unit testing support for mbed framework
- Take into account Mbed OS6 API changes
- RawSerial is used with Mbed OS 5 since Serial doesn't support putc with baremetal profile
2020-07-28 15:22:36 +03:00
adc2d5fe7c Update VSCode template
Starting with cpptools v0.29, escaped paths in the compilerArgs field don't work on Windows.
2020-07-28 15:10:52 +03:00
def149a29e Use updated registry API 2020-07-25 17:13:05 +03:00
39cb23813f Allow ignoring "platforms" and "frameworks" fields in "library.json" and treat a library as compatible with all 2020-07-25 11:51:47 +03:00
85f5a6a84a Bump version to 4.4.0a7 2020-07-24 21:00:58 +03:00
6ace5668b8 Update the registry publish endpoints 2020-07-24 20:57:18 +03:00
c193a4ceb7 Handle proxy environment variables in lower case // Resolve #3606 2020-07-23 19:07:29 +03:00
1abc110f8a Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2020-07-23 17:57:00 +03:00
73740aea89 Sync docs and examples 2020-07-23 17:56:41 +03:00
83110975fa Docs: Sync 2020-07-23 17:42:46 +03:00
881c5ea308 Remove unused code 2020-07-23 17:37:23 +03:00
22f1b94062 Bump version to 4.4.0a6 2020-07-21 12:42:26 +03:00
ea30d94324 Automatically enable LDF dependency chain+ mode (evaluates C/C++ Preprocessor conditional syntax) for Arduino library when “library.properties” has “depends” field // Resolve #3607 2020-07-21 12:41:38 +03:00
1ed462a29a PyLint fix 2020-07-16 01:00:38 +03:00
a2efd7f7c5 Bump version to 4.4.0a5 2020-07-15 23:18:07 +03:00
ca33058637 New commands for the registry package management (pack, publish, unpublish) 2020-07-15 23:16:46 +03:00
a6f143d1ca Dump data intended for IDE extensions/plugins using a new platformio project idedata command 2020-07-15 14:20:29 +03:00
1368fa4c3b Implement new fields (id, ownername, url, requirements) for PackageSpec API 2020-07-14 21:07:09 +03:00
cca3099d13 Ensure that module.json keywords are lowercased 2020-07-14 18:55:29 +03:00
368c66727b Fix issue with package packing when re-map is used and the manifest is missing in "include" (copy it now) 2020-07-12 22:39:32 +03:00
a688edbdf1 Fix an issue with manifest parser when "new_from_archive" API is used 2020-07-09 21:53:46 +03:00
e570aadd72 Docs: Sync 2020-07-09 17:17:34 +03:00
f85cf61d68 Revert back max length of author name to 50 chars 2020-07-08 23:23:14 +03:00
f27c71a0d4 Increase author name length to 100 chars for manifest 2020-07-08 22:56:14 +03:00
940682255d Lock Python's isort package to isort<5 2020-07-08 22:16:52 +03:00
a00722bef4 Ignore maintainer's broken email in library.properties manifest 2020-07-08 21:53:28 +03:00
84132d9459 Fix tests 2020-07-08 21:52:34 +03:00
42fd284560 Improve parsing of the "author" field in a library.properties manifest 2020-07-08 20:21:10 +03:00
40d6847c96 Add option to pass a custom path where to save package archive 2020-07-08 13:46:36 +03:00
abd3f8b3b5 Docs: Remove legacy library dependency syntax for github 2020-07-07 22:53:01 +03:00
3c986ed681 Recursively remove .pio folders when packing a package 2020-07-07 16:28:51 +03:00
8b24b0f657 Sync docs & examples 2020-07-06 23:37:28 +03:00
0f8042eeb4 Implement PackagePacker.get_archive_name API 2020-07-06 15:57:49 +03:00
f97632202b Fix issue with KeyError 2020-07-06 15:57:10 +03:00
a79e933c37 Ignore author's broken email in a package manifest 2020-07-06 14:22:35 +03:00
ef53bcf601 Ignore empty fields in library.properties manifest 2020-07-06 14:17:00 +03:00
08a87f3a21 Do not allow [;.<>] chars for a package name 2020-07-03 19:14:58 +03:00
b3dabb221d Allow "+" in a package name 2020-07-03 16:07:36 +03:00
899a6734ee Add .ccls to .gitignore (vim and emacs) (#3576)
* Add .ccls to .gitignore (vim)

* Add .ccls to .gitignore (emacs)
2020-06-30 21:48:44 +03:00
7f48c8c14e Fix PyLint for PY 2.7 2020-06-30 15:06:40 +03:00
2c24e9eff6 Fall back to latin-1 encoding when UTF-8 decoding fails while parsing a manifest 2020-06-30 14:28:37 +03:00
5cdca9d490 Optimize tests 2020-06-29 21:14:34 +03:00
1ac6c50334 Update multi-environment test for PIO test command 2020-06-29 20:52:15 +03:00
4cbad399f7 Remove mbed framework from several tests 2020-06-29 19:22:22 +03:00
2b8aebbdf9 Extend test for parsing package manifest when "system" is used as a list 2020-06-29 15:06:21 +03:00
e9a15b4e9b Parse package.json manifest keywords 2020-06-27 21:42:13 +03:00
dd18abcac3 Fix tests 2020-06-27 12:59:12 +03:00
b046f21e0d Fix "RuntimeError: dictionary keys changed during iteration" when parsing "library.json" dependencies 2020-06-27 12:46:04 +03:00
29fb803be1 Enable PIO Core tests on Python 3.8 2020-06-27 12:36:57 +03:00
bc2eb0d79f Parse dev-platform keywords 2020-06-26 19:49:25 +03:00
0bec1f1585 Extend system info with "file system" and "locale" encodings 2020-06-26 18:38:17 +03:00
a1ec3e0a22 Remove "vendor_url" and "docs_url" from Platform API 2020-06-25 23:23:55 +03:00
7bc22353cc Docs: Sync dev-platforms 2020-06-25 18:04:04 +03:00
efc2242046 Remove empty data from board information 2020-06-25 14:51:53 +03:00
5dadb8749e Change slogan to "PlatformIO is a professional collaborative platform for embedded development" 2020-06-23 12:33:00 +03:00
82735dd571 Fixed an issue with improper processing of source files added via multiple Build Middlewares // Resolve #3531 2020-06-23 11:46:00 +03:00
9fb4cde2a5 Do not generate ".travis.yml" for a new project, let the user have a choice 2020-06-23 11:26:22 +03:00
164ae2bcbc Extend system info with Python and PIO Core executables // Issue #3521 2020-06-23 11:20:29 +03:00
a172a17c81 Bump version to 4.4.0a4 2020-06-22 23:09:28 +03:00
5ee90f4e61 Display system-wide information using platformio system info command // Resolve #3521 2020-06-22 23:04:36 +03:00
3aae791bee Change slogan to "collaborative platform" 2020-06-22 20:02:43 +03:00
9f05519ccd List available project targets with a new "platformio run --list-targets" command // Resolve #3544 2020-06-22 19:53:31 +03:00
f19491f909 Docs: Sync articles 2020-06-22 17:55:02 +03:00
967a856061 Do not allow ":" and "/" chars in a package name 2020-06-22 15:25:02 +03:00
87d5997b46 Add a test that ensures setUp and tearDown functions can be compiled 2020-06-22 14:42:45 +03:00
c20a1f24cd Don't print relative paths with double-dot 2020-06-18 20:36:59 +03:00
260c36727c fix pio access urn format 2020-06-17 23:56:22 +03:00
03d9965758 Replace urn with prn (#3565)
* Replace urn with prn

* fix

* fix text
2020-06-17 23:46:50 +03:00
e853d61e16 Add orgname filter for access list (#3564)
* add orgname filter for access list

* fix

* fix namings
2020-06-17 18:55:40 +03:00
42e8ea29ff CLI to manage access level on PlatformIO resources. Resolve #3534 (#3563) 2020-06-17 13:53:53 +03:00
1e90c821dc Disable package upload test (#3562) 2020-06-17 00:24:55 +03:00
cad0ae0113 Update slogan to "No more vendor lock-in!" 2020-06-16 15:06:04 +03:00
21f3dd11f4 Fix printing relative paths on Windows // Resolve #3542
Fixes "ValueError" when running "clean" target if "build_dir"
points to a folder on a different logical drive
2020-06-16 12:27:49 +03:00
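As a hedged illustration of the bug above: on Windows, os.path.relpath() raises ValueError when the two paths sit on different logical drives, so a printing helper has to fall back to the absolute path. The helper name below is made up for this sketch.

    import os

    def printable_path(path, base):
        try:
            return os.path.relpath(path, base)
        except ValueError:  # e.g. build_dir on D:\ while the project lives on C:\
            return os.path.abspath(path)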
a9c13aa20e Implement "ManifestParserFactory.new_from_archive" API 2020-06-15 22:05:59 +03:00
d3fd115743 Black format 2020-06-15 22:05:28 +03:00
df0e6016bb Handle possible NodeList in source files when processing Middlewares // Resolve #3531
env.Object() returns a list of objects that breaks the processing of
subsequent middlewares since we only expected File nodes.
2020-06-15 21:25:24 +03:00
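A rough sketch of the normalization this commit describes, not the actual middleware code: env.Object() may hand back a list-like NodeList rather than a single File node, so the result is flattened before the next middleware sees it.

    def iter_nodes(result):
        # Accept either a single SCons node or a list/NodeList of nodes
        if isinstance(result, (list, tuple)) or type(result).__name__ == "NodeList":
            for node in result:
                yield node
        else:
            yield result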
cb70e51016 Update changelog for Custom Targets 2020-06-13 16:21:15 +03:00
cf2fa37e56 Bump version to 4.4.0a3 2020-06-13 13:18:54 +03:00
28d9f25f9a Added a new "-e, --environment" option to "platformio project init" command 2020-06-12 23:47:12 +03:00
fdb83c24be Clean autogenerated files before running tests // Resolve #3523
Fixes possible conflicts between auxiliary test transport files when
project contains multiple environments with different platforms
2020-06-11 23:53:52 +03:00
660b57cdd3 Update PIO Home front-end to 3.2.3 2020-06-11 21:16:06 +03:00
405dcda824 Feature/update account tests (#3556)
* update account tests

* change second user

* refactoring

* clean

* fix tests email receiving

* fix
2020-06-11 16:02:38 +03:00
266612bbdf Run CI on pull requests 2020-06-11 15:27:51 +03:00
2722e27415 Sync docs 2020-06-11 15:15:46 +03:00
f571ad9d47 Sync docs 2020-06-11 11:03:48 +03:00
ef8a9835b0 Bump version to 4.4.0a2 2020-06-10 14:26:48 +03:00
b71b939307 Rename "AddSystemTarget" to "AddPlatformTarget" 2020-06-10 14:25:53 +03:00
9e3ba11e8a skip account tests 2020-06-10 12:36:07 +03:00
91e9406304 cleaning 2020-06-10 12:22:28 +03:00
0d8272890c merge account, org and team tests into one file 2020-06-10 12:02:34 +03:00
a182cca5e9 tests fix (#3555)
* replace timestamp with randint in tests

* replace pop3 with imap
2020-06-10 11:07:19 +03:00
e6fbd6acf1 Remove debug code 2020-06-09 23:26:49 +03:00
062a82c89e Sync docs 2020-06-09 20:59:23 +03:00
89cc6f9bf3 Bump version to 4.4.0a1 2020-06-09 18:44:49 +03:00
3c8e0b17a7 Added support for custom targets 2020-06-09 18:43:50 +03:00
e0023bb908 increase tests email receiving time 2020-06-09 17:05:11 +03:00
a5547491ed Add account and org destroy commands. Fix tests (#3552)
* Add account and org destroy commands. Fix tests

* fix tests

* fix

* fix texts
2020-06-09 15:50:37 +03:00
78546e9246 Docs: Add "TensorFlow, Meet The ESP32" to articles list 2020-06-08 19:26:48 +03:00
7457ef043b Docs: Sync ASR Micro dev-platform 2020-06-08 12:00:19 +03:00
e0e97a3629 Cache the latest news in PIO Home for 180 days 2020-06-05 18:29:11 +03:00
f5e6820903 Bump version to 4.3.5a2 2020-06-05 14:18:24 +03:00
27fd3b0b14 Improve detecting if PlatformIO Core is run in container 2020-06-05 14:17:19 +03:00
ced244d30a Sync docs 2020-06-05 11:30:15 +03:00
6fa7cb4af5 Add new dev-platform "ASR Microelectronics ASR605x" 2020-06-04 22:59:05 +03:00
94cb808285 CLI to manage teams. Resolve #3533 (#3547)
* CLI to manage teams.Minor fixes. Resolve #3533

* fix teams tests

* disable org and team tests

* minor fixes. fix error texts

* fix split compatibility
2020-06-04 19:31:30 +03:00
42df3c9c3f Sync docs 2020-06-04 15:27:46 +03:00
0c4c113b0a Fix account show command when PLATFORMIO_AUTH_TOKEN is used 2020-06-04 14:09:42 +03:00
3c1b08daab Ignore empty PLATFORMIO_AUTH_TOKEN 2020-06-04 13:57:56 +03:00
d7f4eb5955 Minor grammar fix 2020-06-03 22:40:37 +03:00
87b5fbd237 More cosmetic changes to Org CLI 2020-06-03 22:34:37 +03:00
6c97cc6192 Cosmetic changes to Org CLI 2020-06-03 22:22:13 +03:00
cbcd3f7c4d Fix cmd.org test 2020-06-03 21:40:03 +03:00
f7dceb782c Fix PY2.7 when PermissionError is not available 2020-06-03 21:24:01 +03:00
140fff9c23 CLI to manage organizations. Resolve #3532 (#3540)
* CLI to manage organizations. Resolve #3532

* fix tests

* fix test

* add org owner test

* fix org test

* fix invalid username/orgname error text

* refactor auth request in clients

* fix

* fix send auth request

* fix regexp

* remove duplicated code. minor fixes.

* Remove space

Co-authored-by: Ivan Kravets <me@ikravets.com>
2020-06-03 17:41:30 +03:00
8c586dc360 Sync docs 2020-06-03 17:16:59 +03:00
fe52f60389 Bypass PermissionError when cleaning the cache 2020-06-03 14:33:53 +03:00
9064fcbc77 Sync docs 2020-06-03 14:33:03 +03:00
9a1d2970cc Sync docs 2020-05-30 01:10:04 +03:00
26ba6e4756 Add a new option to the package publishing CLI that allows disabling email notification 2020-05-28 17:06:36 +03:00
ae58cc74bd Rename checksum header to X-PIO-Content-SHA256 2020-05-28 16:08:37 +03:00
37e795d539 Send package checksum when publishing 2020-05-28 16:07:20 +03:00
49960b257d Implement fs.calculate_file_hashsum 2020-05-28 16:07:02 +03:00
25a421402b fix package type detector 2020-05-28 12:49:32 +03:00
8e72c48319 fix datetime validation in package publish command 2020-05-27 22:30:16 +03:00
c1965b607b Add binary stream to package publishing request 2020-05-27 17:27:05 +03:00
d38f5aca5c Fix metavar for package CLI 2020-05-27 16:20:02 +03:00
c06859aa9f Add package type to unpublish command 2020-05-27 14:30:27 +03:00
e706a2cfe2 Refactor pio account client. Resolve #3525 (#3529) 2020-05-27 13:39:58 +03:00
0c301b2f5d Fix order of arguments 2020-05-27 01:14:07 +03:00
deb12972fb Implement "package unpublish" CLI 2020-05-27 01:10:35 +03:00
8346b9822d Implement "package pack" command 2020-05-26 22:17:55 +03:00
19cdc7d34a Initial support for package publishing into the registry 2020-05-26 22:01:32 +03:00
49cc5d606b Sync docs 2020-05-26 21:58:58 +03:00
58470e8911 PY2 lint fix 2020-05-26 14:30:43 +03:00
38699cca8f Bump version to 4.3.5a1 2020-05-26 14:26:42 +03:00
0eb8895959 Add support for “globstar/**” (recursive) pattern 2020-05-26 14:25:28 +03:00
99d4e0c390 Merge branch 'release/v4.3.4' 2020-05-23 20:35:59 +03:00
6d32aeb310 Merge tag 'v4.3.4' into develop
Bump version to 4.3.4
2020-05-23 20:35:59 +03:00
8184308755 Bump version to 4.3.4 2020-05-23 20:33:13 +03:00
b68953b733 Bump version to 4.3.4b1 2020-05-23 20:01:25 +03:00
7dce494ad6 Rename "misc" command to "system", do not append completion code for Fish shell // Resolve #3435 2020-05-23 20:00:56 +03:00
4921bf8b6a PyLint fix 2020-05-22 14:22:41 +03:00
32cb0d6e4d Handle possible issue on Python 2.x when writing to thread buffer
The problem happens when value has type "unicode" that shouldn't be decoded
2020-05-22 14:17:17 +03:00
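An illustrative guard for the Python 2 issue above (not the exact PlatformIO code): only byte strings get decoded, while a value that is already text ("unicode" on Python 2, "str" on Python 3) is passed through untouched.

    def to_text(value, encoding="utf-8"):
        if isinstance(value, bytes):
            return value.decode(encoding, errors="replace")
        return value  # already text, do not decode again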
e2c5a3c498 Add Python 3.8 for Tox 2020-05-22 14:12:27 +03:00
ec34a65cff Bump version to 4.3.4a5 2020-05-21 15:40:38 +03:00
9296615dbf Merge branch 'develop' of https://github.com/platformio/platformio-core into develop 2020-05-21 15:39:59 +03:00
56795940b9 Sync teensy dev-platform 2020-05-21 15:39:24 +03:00
09a5952248 Add new record to history log
Mention issues about permission error on Windows when cloning
package from Git repository
2020-05-20 21:51:13 +03:00
735435306d Copy and remove cloned package instead of moving // Resolve #2844, Resolve #3328
On Windows, it’s not possible to move a file which is used by another
process (e.g. Git extension in VSCode)
2020-05-20 21:32:55 +03:00
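An approximate sketch of the workaround (function and argument names are invented for this example): copy the temporary clone into place and then try to delete it, instead of a single move that fails on Windows while another process holds a file in the clone open.

    import shutil

    def install_from_tmp_clone(tmp_dir, dst_dir):
        shutil.copytree(tmp_dir, dst_dir, symlinks=True)  # keep symlinks intact
        try:
            shutil.rmtree(tmp_dir)
        except OSError:
            pass  # still locked (e.g. by a Git extension); leave the temp clone behind
        return dst_dir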
bdd57bf356 Ensure that copytree preserves symlinks 2020-05-20 20:57:55 +03:00
8840b28968 Handle possible issue on Python 2.x when writing to thread buffer
The problem happens when value has type "unicode" that shouldn't be decoded
2020-05-20 17:04:50 +03:00
e31591a35e Print warning about an issue with mapped network drives on Windows // Issue #3417
Starting with Python 3.8, paths to mapped network drives are resolved
to their real path in the system, e.g. "Z:\path" becomes "\\path", which
causes weird errors in the default terminal with a message that UNC
paths are not supported
2020-05-19 22:37:05 +03:00
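A hedged illustration of the Python 3.8 behaviour mentioned above: realpath() may resolve a mapped drive letter to its UNC form, which the default Windows terminal refuses as a working directory. The path below is an example only.

    import os

    project_dir = os.path.realpath(r"Z:\projects\blink")
    if project_dir.startswith("\\\\"):  # resolved to a UNC path such as \\server\share
        print("Warning: project path resolves to a UNC path: %s" % project_dir)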
457a218723 Sync docs 2020-05-19 13:27:54 +03:00
9724660dda Update SPDX licenses to 3.9 2020-05-19 13:27:44 +03:00
eac6c1c552 Handle error when internet is offline. Resolve #3503 (#3505)
* Handle error when internet is offline.

* Fix

* minor fix
2020-05-17 22:44:27 +03:00
54d73e834b Github Actions: Checkout submodules recursive 2020-05-14 18:08:51 +03:00
099e3c7198 Use original MongoDB license for "compilation_db.py" 2020-05-13 00:48:11 +03:00
96a68c6b14 Docs: Sync Atmel AVR dev-platform 2020-05-11 22:10:32 +03:00
2a0a1247e3 Revert "Add initialization config for new simavr tool"
This reverts commit 16966a4957.
2020-05-11 18:21:48 +03:00
7555d66748 Revert "Add special debug port for simavr tool"
This reverts commit 7b43444d81.
2020-05-11 18:21:26 +03:00
c76940f7ce PyLint fix 2020-05-10 19:11:06 +03:00
b2ed027bc3 Bump version to 4.3.4a4 2020-05-10 18:36:16 +03:00
01a1981ca1 Added PlatformIO CLI Shell Completion for Fish, Zsh, Bash, and PowerShell // Resolve #3435 2020-05-10 18:35:50 +03:00
03228c528e Bump version to 4.3.4a3 2020-05-09 16:36:05 +03:00
ac510c1553 fix summary caching (#3500) 2020-05-09 16:35:21 +03:00
a1ff5e1a4f Save summary data to local session. (#3497)
* Save summary data to local session.

* naming

* fix account summary test

* add ttl for summary cache

* refactoring get_account_info

* fix
2020-05-08 21:34:52 +03:00
7b43444d81 Add special debug port for simavr tool 2020-05-08 12:31:48 +03:00
16966a4957 Add initialization config for new simavr tool 2020-05-08 01:14:15 +03:00
7e7a6d7807 Skip account tests if env variables are not present (#3494)
* added skip if env variables are not present. fix exception texts

* fix texts

* fix texts
2020-05-06 19:25:24 +03:00
f32dbeeb6d Bump version to 4.3.4a2 2020-05-06 12:30:41 +03:00
f78ffaded0 Remove local PIO Account session from PIO Remote when token is broken 2020-05-06 12:29:01 +03:00
8480ebde89 Rename "authenticating" to "authorizing" wording for PIO Account 2020-05-06 12:27:29 +03:00
44ee7d6a6b Docs: Sync ESP8266 dev-platform 2020-05-04 15:50:11 +03:00
2ab47b7968 Remove account state item if refreshing token failed (#3487) 2020-05-04 13:14:52 +03:00
7181b7632b Docs: Increase content width 2020-05-03 11:06:26 +03:00
b82eaca45e Enable caching for PIP when building contrib-pysite 2020-05-02 15:29:24 +03:00
fd04f31c5f Update link to CLA provider 2020-05-01 23:37:38 +03:00
75abe8a0af Sync docs 2020-05-01 23:36:29 +03:00
f7995ce49a PyLint fix 2020-04-29 12:56:54 +03:00
2c2309acac Bump version to 4.3.4a1 2020-04-29 12:41:56 +03:00
5f79ab34f5 Automatically build `contrib-pysite` package on a target machine when pre-built package is not compatible // Resolve #3482 2020-04-29 12:40:04 +03:00
5bcbee7423 Merge branch 'release/v4.3.3' 2020-04-28 18:06:53 +03:00
961049cf9b Merge tag 'v4.3.3' into develop
Bump version to 4.3.3
2020-04-28 18:06:53 +03:00
72e7492a78 Bump version to 4.3.3 2020-04-28 18:06:46 +03:00
5e4b4bbacd Fix "UnicodeDecodeError: 'utf-8' codec can't decode byte" when non-Latin chars are used in project path // Resolve #3481 2020-04-28 18:05:08 +03:00
a64d368de2 Merge branch 'release/v4.3.2' 2020-04-28 13:27:23 +03:00
6146b58520 Merge tag 'v4.3.2' into develop
Bump version to 4.3.2
2020-04-28 13:27:23 +03:00
f35e6e99af Bump version to 4.3.2 2020-04-28 13:25:52 +03:00
5d8440fdd1 PyLint fixes 2020-04-28 12:48:15 +03:00
d1b394b20a Bump version to 4.3.2rc2 2020-04-27 23:42:22 +03:00
520d6decac Nominate some exceptions to UserSideException 2020-04-27 23:42:02 +03:00
4a251f0ab0 Fix JSONDecodeError when bottle.SimpleTemplate is used 2020-04-27 23:41:36 +03:00
c215abb50c Bump version to 4.3.2rc1 2020-04-26 19:46:33 +03:00
31ca47837d Disable build cache for Github Actions 2020-04-26 12:58:32 +03:00
560699fc6b Apply formatting 2020-04-26 12:58:05 +03:00
51ec94f78c Add new test for PIO Check with --skip-packages option 2020-04-26 01:38:25 +03:00
ac1210fbea Add -imacros files to forcedInclude field in VSCode template 2020-04-26 00:35:22 +03:00
c03f93521b Refactor PIO Check feature (#3478)
* Add new option --skip-packages for check command

Check tools might fail if they're not able to preprocess source
files (for example, Cppcheck uses a custom preprocessor that is
not able to parse complex preprocessor code in the Zephyr framework).
Instead, the user can specify this option to skip headers included from
packages and only check project sources (see the sketch after this entry).

* Fix toolchain built-in include paths order
C++ and fixed directories should have higher priority

* Refactor check feature

The main purpose is to prepare a more comprehensive build environment.
It's crucial for cppcheck to be able to check complex frameworks like
zephyr, esp-idf, etc. Also detect a special case when cppcheck fails to check
the entire project (e.g. a syntax error due to custom preprocessor)

* Add new test for check feature

Tests the ststm32 platform with all tools and the main frameworks

* Update check tools to the latest available versions

* Test check tools and Zephyr framework only with Python 3

* Tidy up code

* Add history entry
2020-04-26 00:10:41 +03:00
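A loose sketch of what the --skip-packages option described in this PR boils down to: keep only include paths that live inside the project, dropping anything under the PlatformIO packages directory. The directory layout and function name are assumptions for illustration.

    import os

    def project_only_includes(include_paths, project_dir):
        project_dir = os.path.abspath(project_dir)
        return [p for p in include_paths
                if os.path.abspath(p).startswith(project_dir)]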
62ede23b0e Bump version to 4.3.2b1 2020-04-25 15:48:55 +03:00
60f28599d9 Echo what is typed when "send_on_enter" device monitor filter is used // Resolve #3452 2020-04-25 15:48:37 +03:00
629f23c4f3 Bump version to 4.3.2a4 2020-04-25 13:20:56 +03:00
27db344739 Bump version to 4.3.2a3 2020-04-25 13:15:55 +03:00
777a47fd99 Minor improvements 2020-04-25 13:14:54 +03:00
e913159cb4 Sync docs 2020-04-24 21:48:57 +03:00
b285c3137a Extend remote hosts with PlatformIO when checking internet connection 2020-04-24 16:25:02 +03:00
f0576ddcd9 Docs: Fix incorrect type for library.json "libCompatMode" field 2020-04-24 13:14:03 +03:00
6e2cc333f2 disable pio account change password and username update tests 2020-04-24 11:57:17 +03:00
18c7c5a9be Refactor pio account tests. (#3473) 2020-04-24 11:25:09 +03:00
01945716d3 Sync docs 2020-04-24 00:43:32 +03:00
2f5b231dc3 Disable pio account tests (#3472)
* minor fix pio account test

* disable pio account change password and username update tests
2020-04-24 00:13:46 +03:00
75c1aafaef fix pio account tests 2020-04-23 20:45:51 +03:00
b9714d0ac1 Add pio account tests (#3470)
* add pio account tests

* update tests
2020-04-23 16:05:00 +03:00
5774654582 Switch to Github Actions (#3471) 2020-04-23 16:04:15 +03:00
0a46b8ab6a add login with code method for account client. add new account rpc handler. (#3468) 2020-04-21 22:44:32 +03:00
a556573a4f Move env-dependent directories to the appropriate CMAKE_BUILD_TYPE // Issue #3460
This will allow dynamically populating the list of sources depending on
the selected environment. At the same time, the "src" and "lib" folders remain
common for all environments
2020-04-21 22:01:07 +03:00
fd91819b2c Fix missing include paths for check tools
Includes are now split by scopes and imported as a dictionary
2020-04-21 19:26:00 +03:00
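A minimal example of the "includes split by scopes" idea from the commit above; the scope names and paths are placeholders, not the exact keys PlatformIO uses.

    includes = {
        "build": ["include", "src"],
        "compatlib": [".pio/libdeps/myenv/SomeLib/src"],
        "toolchain": ["/path/to/toolchain/include"],
    }
    # A check tool that cannot digest toolchain headers can simply skip that scope:
    check_paths = includes["build"] + includes["compatlib"]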
24c04057e9 CLion: Add paths to libraries specified via lib_extra_dirs option (#3463)
* Add paths to libraries specified via lib_extra_dirs option

Besides, global folders in SRC_LIST seem a bit unnecessary
since there might be unused libraries in these folders

* Refactor processing of includes when exporting IDE/Editor projects

Split includes according to their source. That will help export includes in a more flexible way.
For example some IDEs don't need include paths from toolchains

* Add new record to history log

* Typo fix
2020-04-21 17:37:55 +03:00
2960b73da5 Fix an issue when PIO Remote agent was not reconnected automatically 2020-04-21 12:32:03 +03:00
c4645a9a96 Sync docs 2020-04-21 10:43:34 +03:00
877e84ea1d Bump version to 4.3.2a2 2020-04-19 20:05:21 +03:00
cb1058c693 New PIO Account with "username" and profile support 2020-04-19 20:03:46 +03:00
be6bf5052e Open source PIO Remote client 2020-04-19 19:26:56 +03:00
7780003d01 New Account Management System (#3443)
* add login for PIO account to account cli

* Remove PyJWT lib. Fixes.

* Add password change for account

* Refactoring. Add Account Client.

* Fixes.

* http -> https.

* adding error handling for expired session.

* Change broker requests from json to form-data.

* Add pio account register command. fixes

* Fixes.

* Fixes.

* Add username and password validation

* fixes

* Add token, forgot commands to pio account

* fix domain

* add update command for pio account

* fixes

* refactor profile update output

* lint

* Update exception text.

* Fix logout

* Add custom user-agent for pio account

* add profile show command. minor fixes.

* Fix pio account show output format.

* Move account related exceptions

* cleaning

* minor fix

* Remove try except for account command authenticated/non-authenticated errors

* fix profile update cli command

* rename first name and last name vars to 'firstname' and 'lastname'
2020-04-19 19:06:06 +03:00
445ca937fd Sync docs 2020-04-18 22:40:27 +03:00
1f4aff7f27 Sync docs 2020-04-16 16:07:02 +03:00
788351a0cd Fixed an incorrect node path used for pattern matching when processing middleware nodes 2020-04-13 16:41:03 +03:00
5ba7753bfa Sync docs 2020-04-12 17:43:27 +03:00
ae57829190 Generate user agent based on PIO Core environment 2020-04-10 17:59:58 +03:00
030ddf4ea1 Apply black formatting 2020-04-10 17:08:16 +03:00
ccc43633b7 Support for a new dev-platform NXP i.MX RT 2020-04-10 13:14:10 +03:00
aba2ea9746 Temporarily disable "infineonxmc" in CI due to a broken dev-platform 2020-04-09 12:44:52 +03:00
d5ebbb99a7 Dynamically choose extension for file with unit test transports (#3454)
A C file should be used by default, as only Arduino and mbed require C++ files.
There might be a lot of legacy projects, so custom transports are also set to use C++.
2020-04-09 12:02:38 +03:00
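A simplified sketch of the selection rule quoted above; the framework names and the custom-transport flag are assumptions for illustration.

    def test_transport_ext(framework, custom_transport=False):
        if custom_transport or framework in ("arduino", "mbed"):
            return ".cpp"  # Arduino/mbed (and legacy custom transports) need C++
        return ".c"        # plain C is enough everywhere else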
a636a60e00 Sort examples 2020-04-08 22:34:04 +03:00
ad7e3f83aa Fix tests/commands/test_init.py 2020-04-08 17:18:59 +03:00
baa7aab1d7 Specify C++ as the language for .ino files when preprocessing them for PVS-Studio // Resolve #3450 2020-04-07 11:35:17 +03:00
2e320c01b3 Fix test 2020-04-06 18:19:34 +03:00
3cd6c618a4 Docs: Sync STM32 dev-platform 2020-04-06 16:56:05 +03:00
5ea759bc3e Document PIO Core: Integration with custom applications (extensions, plugins) 2020-04-05 19:48:50 +03:00
11cb3a1bf7 Docs: Add info about stm32pio tool for STM32Cube framework 2020-04-04 00:45:45 +03:00
7412cf586b Update docs for Zephyr RTOS 2.2 2020-04-02 00:59:35 +03:00
f976cf7ae5 Docs: Extend tutorials list 2020-03-30 17:15:18 +03:00
e92b498b68 Fixed an issue when saving libraries in new project results in error "No option 'lib_deps' in section" // Resolve #3442 2020-03-27 13:34:14 +02:00
1b0810ec87 Docs: Fix broken link for creating dev-platform // Resolve #123 2020-03-26 22:31:15 +02:00
45e523a468 Docs: Sync with Atmel SAM dev-platform 2020-03-25 17:01:12 +02:00
d42481d196 Sync docs 2020-03-24 18:03:23 +02:00
11c946bfe4 Sync Espressif 32 dev-platform 2020-03-23 19:52:57 +02:00
589d6f9e12 Docs: Sync Espressif 32 dev-platform 2020-03-23 19:35:18 +02:00
79b3a232fc Move debug client and server implementations to "process" folder 2020-03-21 22:00:14 +02:00
f95230b86e Fixed UnicodeDecodeError on Windows when network drive (NAS) is used // Resolve #3417 2020-03-21 21:53:42 +02:00
fc9a16aa81 Merge branch 'feature/issue-3417-unicodeerror-nas' into develop 2020-03-21 21:44:13 +02:00
81a4d28918 Docs: Remove duplicate demo image of PlatformIO for CLion 2020-03-21 16:39:17 +02:00
fd137fe054 Bump version to 4.3.2a1 2020-03-21 13:28:31 +02:00
efd3b244e1 Force PIPE reader to UTF-8 on Windows // Issue #3417 2020-03-21 13:27:46 +02:00
dbeaaf270c fix typo in URL (#3432) 2020-03-21 00:02:50 +02:00
32642b7ec8 Fix broken link to Renode in history 2020-03-20 17:16:49 +02:00
096c2f6165 Typo fix in docs 2020-03-20 17:11:31 +02:00
91ae8b4cc7 Fixed typo in history 2020-03-20 15:15:30 +02:00
5a12f1f56e Merge tag 'v4.3.1' into develop
Bump version to 4.3.1
2020-03-20 15:13:46 +02:00
181 changed files with 12518 additions and 7144 deletions


@ -1,31 +0,0 @@
build: off
platform:
- x64
environment:
matrix:
- TOXENV: "py27"
PLATFORMIO_BUILD_CACHE_DIR: C:\Temp\PIO_Build_Cache_P2_{build}
PYTHON_DIRS: C:\Python27-x64;C:\Python27-x64\Scripts
- TOXENV: "py36"
PLATFORMIO_BUILD_CACHE_DIR: C:\Temp\PIO_Build_Cache_P3_{build}
PYTHON_DIRS: C:\Python36-x64;C:\Python36-x64\Scripts
install:
- cmd: git submodule update --init --recursive
- cmd: SET PATH=%PYTHON_DIRS%;C:\MinGW\bin;%PATH%
- cmd: SET PLATFORMIO_CORE_DIR=C:\.pio
- cmd: pip install --force-reinstall tox
test_script:
- cmd: tox
notifications:
- provider: Slack
incoming_webhook:
secure: E9H0SU0Ju7WLDvgxsV8cs3J62T3nTTX7QkEjsczN0Sto/c9hWkVfhc5gGWUkxhlD975cokHByKGJIdwYwCewqOI+7BrcT8U+nlga4Uau7J8=
on_build_success: false
on_build_failure: true
on_build_status_changed: true

.github/workflows/core.yml

@ -0,0 +1,44 @@
name: Core
on: [push, pull_request]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: [2.7, 3.7, 3.8]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
- name: Python Lint
run: |
tox -e lint
- name: Integration Tests
env:
TEST_EMAIL_LOGIN: ${{ secrets.TEST_EMAIL_LOGIN }}
TEST_EMAIL_PASSWORD: ${{ secrets.TEST_EMAIL_PASSWORD }}
TEST_EMAIL_IMAP_SERVER: ${{ secrets.TEST_EMAIL_IMAP_SERVER }}
run: |
tox -e testcore
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Core*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}

.github/workflows/docs.yml

@ -0,0 +1,32 @@
name: Docs
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
- name: Build docs
run: |
tox -e docs
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Docs*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}

.github/workflows/examples.yml

@ -0,0 +1,64 @@
name: Examples
on: [push, pull_request]
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-16.04, windows-latest, macos-latest]
python-version: [2.7, 3.7]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
with:
submodules: "recursive"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
- name: Run on Linux
if: startsWith(matrix.os, 'ubuntu')
env:
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,intel_mcs51,aceinna_imu"
run: |
# ChipKIT issue: install 32-bit support for GCC PIC32
sudo apt-get install libc6-i386
# Free space
sudo apt clean
docker rmi $(docker image ls -aq)
df -h
# Run
tox -e testexamples
- name: Run on macOS
if: startsWith(matrix.os, 'macos')
env:
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,microchippic32,gd32v,nuclei,lattice_ice40"
run: |
df -h
tox -e testexamples
- name: Run on Windows
if: startsWith(matrix.os, 'windows')
env:
PLATFORMIO_CORE_DIR: C:/pio
PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,riscv_gap"
run: |
tox -e testexamples
- name: Slack Notification
uses: homoluctus/slatify@master
if: failure()
with:
type: ${{ job.status }}
job_name: '*Examples*'
commit: true
url: ${{ secrets.SLACK_BUILD_WEBHOOK }}


@ -1,3 +1,3 @@
[settings]
line_length=88
known_third_party=SCons, twisted, autobahn, jsonrpc
known_third_party=OpenSSL, SCons, autobahn, jsonrpc, twisted, zope


@ -1,3 +1,6 @@
[REPORTS]
output-format=colorized
[MESSAGES CONTROL]
disable=
bad-continuation,
@ -12,4 +15,8 @@ disable=
useless-object-inheritance,
useless-import-alias,
fixme,
bad-option-value
bad-option-value,
; PY2 Compat
super-with-arguments,
raise-missing-from


@ -1,39 +0,0 @@
language: python
matrix:
include:
- os: linux
sudo: false
python: 2.7
env: TOX_ENV=docs
- os: linux
sudo: required
python: 2.7
env: TOX_ENV=py27 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
- os: linux
sudo: required
python: 3.6
env: TOX_ENV=py36 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
- os: osx
language: generic
env: TOX_ENV=skipexamples
install:
- git submodule update --init --recursive
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
- pip install -U tox
# ChipKIT issue: install 32-bit support for GCC PIC32
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
script:
- tox -e $TOX_ENV
notifications:
email: false
slack:
rooms:
secure: JD6VGfN4+SLU2CwDdiIOr1VgwD+zbYUCE/srwyGuHavnjIkPItkl6T6Bn8Y4VrU6ysbuKotfdV2TAJJ82ivFbY8BvZBc7FBcYp/AGQ4FaCCV5ySv8RDAcQgdE12oaGzMdODiLqsB85f65zOlAFa+htaXyEiRTcotn6Y2hupatrI=
on_failure: always
on_success: change


@ -1,20 +1,20 @@
Contributing
------------
To get started, <a href="https://www.clahub.com/agreements/platformio/platformio-core">sign the Contributor License Agreement</a>.
To get started, <a href="https://cla-assistant.io/platformio/platformio-core">sign the Contributor License Agreement</a>.
1. Fork the repository on GitHub.
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of project where is located `tox.ini` and run `tox -e py27`
4. Go to the root of project where is located `tox.ini` and run `tox -e py37`
5. Activate current development environment:
* Windows: `.tox\py27\Scripts\activate`
* Bash/ZSH: `source .tox/py27/bin/activate`
* Fish: `source .tox/py27/bin/activate.fish`
* Windows: `.tox\py37\Scripts\activate`
* Bash/ZSH: `source .tox/py37/bin/activate`
* Fish: `source .tox/py37/bin/activate.fish`
6. Make changes to code, documentation, etc.
7. Lint source code `make lint`
7. Lint source code `make before-commit`
8. Run the tests `make test`
9. Build documentation `tox -e docs` (creates a directory _build under docs where you can find the html)
10. Commit changes to your forked repository

File diff suppressed because it is too large


@ -1,5 +1,6 @@
lint:
pylint --rcfile=./.pylintrc ./platformio
pylint -j 6 --rcfile=./.pylintrc ./platformio
pylint -j 6 --rcfile=./.pylintrc ./tests
isort:
isort -rc ./platformio
@ -12,7 +13,7 @@ format:
test:
py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
before-commit: isort format lint test
before-commit: isort format lint
clean-docs:
rm -rf docs/_build
@ -27,8 +28,11 @@ clean: clean-docs
profile:
# Usage $ > make PIOARGS="boards" profile
python -m cProfile -o .tox/.tmp/cprofile.prof $(shell which platformio) ${PIOARGS}
python -m cProfile -o .tox/.tmp/cprofile.prof -m platformio ${PIOARGS}
snakeviz .tox/.tmp/cprofile.prof
pack:
python setup.py sdist
publish:
python setup.py sdist upload


@ -1,40 +1,43 @@
PlatformIO
==========
.. image:: https://travis-ci.org/platformio/platformio-core.svg?branch=develop
:target: https://travis-ci.org/platformio/platformio-core
:alt: Travis.CI Build Status
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
:alt: AppVeyor.CI Build Status
.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
:target: https://docs.platformio.org/page/core/index.html
:alt: CI Build for PlatformIO Core
.. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg
:target: https://github.com/platformio/platformio-examples
:alt: CI Build for dev-platform examples
.. image:: https://github.com/platformio/platformio-core/workflows/Docs/badge.svg
:target: https://docs.platformio.org?utm_source=github&utm_medium=core
:alt: CI Build for Docs
.. image:: https://img.shields.io/pypi/v/platformio.svg
:target: https://pypi.python.org/pypi/platformio/
:alt: Latest Version
.. image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg
:target: https://pypi.python.org/pypi/platformio/
:alt: License
.. image:: https://img.shields.io/badge/PlatformIO-Community-orange.svg
:alt: Community Forums
:target: https://community.platformio.org?utm_source=github&utm_medium=core
.. image:: https://img.shields.io/badge/PlatformIO-Labs-orange.svg
:alt: Community Labs
:target: https://piolabs.com/?utm_source=github&utm_medium=core
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
`Contact Us <https://piolabs.com/?utm_source=github&utm_medium=core>`_
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
**Social:** `LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
`Twitter <https://twitter.com/PlatformIO_Org>`_ |
`Facebook <https://www.facebook.com/platformio>`_ |
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
`Bintray <https://bintray.com/platformio>`_ |
`Community <https://community.platformio.org?utm_source=github&utm_medium=core>`_
`Community Forums <https://community.platformio.org?utm_source=github&utm_medium=core>`_
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
:target: https://platformio.org?utm_source=github&utm_medium=core
`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ a new generation ecosystem for embedded development
`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ is a professional collaborative platform for embedded development
**A place where Developers and Teams have true Freedom! No more vendor lock-in!**
* Open source, maximum permissive Apache 2.0 license
* Cross-platform IDE and Unified Debugger
@ -45,26 +48,24 @@ PlatformIO
Get Started
-----------
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
Instruments
-----------
* `What is PlatformIO? <https://docs.platformio.org/page/what-is-platformio.html?utm_source=github&utm_medium=core>`_
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/page/core.html?utm_source=github&utm_medium=core>`_
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
Solutions
---------
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
Professional
------------
**Advanced**
* `PIO Check <https://docs.platformio.org/page/plus/pio-check.html?utm_source=github&utm_medium=core>`_
* `PIO Remote <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
* `PIO Unified Debugger <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `PIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `Debugging <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `Static Code Analysis <https://docs.platformio.org/page/plus/pio-check.html?utm_source=github&utm_medium=core>`_
* `Remote Development <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
Registry
--------
@ -78,6 +79,7 @@ Development Platforms
---------------------
* `Aceinna IMU <https://platformio.org/platforms/aceinna_imu?utm_source=github&utm_medium=core>`_
* `ASR Microelectronics ASR605x <https://platformio.org/platforms/asrmicro650x?utm_source=github&utm_medium=core>`_
* `Atmel AVR <https://platformio.org/platforms/atmelavr?utm_source=github&utm_medium=core>`_
* `Atmel SAM <https://platformio.org/platforms/atmelsam?utm_source=github&utm_medium=core>`_
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
@ -140,8 +142,7 @@ Telemetry / Privacy Policy
Share minimal diagnostics and usage information to help us make PlatformIO better.
It is enabled by default. For more information see:
* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
* `SSL Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_
* `Telemetry Setting <https://docs.platformio.org/page/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
License
-------

docs

Submodule docs updated: d97117eb2e...deae09a880


@ -12,18 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
VERSION = (4, 3, 1)
import sys
VERSION = (5, 0, 2)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A new generation ecosystem for embedded development. "
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"RISC-V, FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3"
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
@ -33,4 +37,28 @@ __email__ = "contact@platformio.org"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_api__ = [
"https://api.registry.platformio.org",
"https://api.registry.ns1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.3.1",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-unity": "~1.20500.0",
"tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0",
"tool-cppcheck": "~1.210.0",
"tool-clangtidy": "~1.100000.0",
"tool-pvs-studio": "~7.9.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
"platformio.org",
]
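# A minimal sketch of how the interpreter-dependent pins above resolve at
# runtime; the concrete values in the comments assume CPython 3.8, which is
# only an illustrative assumption.
import sys
print("~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor))  # "~2.38.0" on 3.8
print("~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0")   # "~4.40001.0" on any Python 3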


@ -22,6 +22,13 @@ from platformio import __version__, exception, maintenance, util
from platformio.commands import PlatformioCLI
from platformio.compat import CYGWIN
try:
import click_completion # pylint: disable=import-error
click_completion.init()
except: # pylint: disable=bare-except
pass
@click.command(
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])


@ -12,24 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
from __future__ import absolute_import
import getpass
import hashlib
import json
import os
import platform
import socket
import uuid
from os import environ, getenv, listdir, remove
from os.path import dirname, isdir, isfile, join, realpath
from time import time
import requests
from platformio import exception, fs, lockfile
from platformio import __version__, exception, fs, proc
from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
from platformio.proc import is_ci
from platformio.project.helpers import (
get_default_projects_dir,
get_project_cache_dir,
get_project_core_dir,
)
from platformio.package.lockfile import LockFile
from platformio.project.helpers import get_default_projects_dir, get_project_core_dir
def projects_dir_validate(projects_dir):
@ -59,10 +56,9 @@ DEFAULT_SETTINGS = {
"value": 7,
},
"enable_cache": {
"description": "Enable caching for API requests and Library Manager",
"description": "Enable caching for HTTP API requests",
"value": True,
},
"strict_ssl": {"description": "Strict SSL for PlatformIO Services", "value": False},
"enable_telemetry": {
"description": ("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
"value": True,
@ -72,7 +68,7 @@ DEFAULT_SETTINGS = {
"value": False,
},
"projects_dir": {
"description": "Default location for PlatformIO projects (PIO Home)",
"description": "Default location for PlatformIO projects (PlatformIO Home)",
"value": get_default_projects_dir(),
"validator": projects_dir_validate,
},
@ -123,7 +119,7 @@ class State(object):
def _lock_state_file(self):
if not self.lock:
return
self._lockfile = lockfile.LockFile(self.path)
self._lockfile = LockFile(self.path)
try:
self._lockfile.acquire()
except IOError:
@ -141,6 +137,9 @@ class State(object):
def as_dict(self):
return self._storage
def keys(self):
return self._storage.keys()
def get(self, key, default=True):
return self._storage.get(key, default)
@ -166,146 +165,6 @@ class State(object):
return item in self._storage
class ContentCache(object):
def __init__(self, cache_dir=None):
self.cache_dir = None
self._db_path = None
self._lockfile = None
self.cache_dir = cache_dir or get_project_cache_dir()
self._db_path = join(self.cache_dir, "db.data")
def __enter__(self):
self.delete()
return self
def __exit__(self, type_, value, traceback):
pass
def _lock_dbindex(self):
if not self.cache_dir:
os.makedirs(self.cache_dir)
self._lockfile = lockfile.LockFile(self.cache_dir)
try:
self._lockfile.acquire()
except: # pylint: disable=bare-except
return False
return True
def _unlock_dbindex(self):
if self._lockfile:
self._lockfile.release()
return True
def get_cache_path(self, key):
assert "/" not in key and "\\" not in key
key = str(key)
assert len(key) > 3
return join(self.cache_dir, key[-2:], key)
@staticmethod
def key_from_args(*args):
h = hashlib.md5()
for arg in args:
if arg:
h.update(hashlib_encode_data(arg))
return h.hexdigest()
def get(self, key):
cache_path = self.get_cache_path(key)
if not isfile(cache_path):
return None
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
return fp.read()
def set(self, key, data, valid):
if not get_setting("enable_cache"):
return False
cache_path = self.get_cache_path(key)
if isfile(cache_path):
self.delete(key)
if not data:
return False
if not isdir(self.cache_dir):
os.makedirs(self.cache_dir)
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert valid.endswith(tuple(tdmap))
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
if not self._lock_dbindex():
return False
if not isdir(dirname(cache_path)):
os.makedirs(dirname(cache_path))
try:
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
except UnicodeError:
if isfile(cache_path):
try:
remove(cache_path)
except OSError:
pass
return self._unlock_dbindex()
def delete(self, keys=None):
""" Keys=None, delete expired items """
if not isfile(self._db_path):
return None
if not keys:
keys = []
if not isinstance(keys, list):
keys = [keys]
paths_for_delete = [self.get_cache_path(k) for k in keys]
found = False
newlines = []
with open(self._db_path) as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
continue
expire, path = line.split("=")
try:
if (
time() < int(expire)
and isfile(path)
and path not in paths_for_delete
):
newlines.append(line)
continue
except ValueError:
pass
found = True
if isfile(path):
try:
remove(path)
if not listdir(dirname(path)):
fs.rmtree(dirname(path))
except OSError:
pass
if found and self._lock_dbindex():
with open(self._db_path, "w") as fp:
fp.write("\n".join(newlines) + "\n")
self._unlock_dbindex()
return True
def clean(self):
if not self.cache_dir or not isdir(self.cache_dir):
return
fs.rmtree(self.cache_dir)
def clean_cache():
with ContentCache() as cc:
cc.clean()
def sanitize_setting(name, value):
if name not in DEFAULT_SETTINGS:
raise exception.InvalidSettingName(name)
@ -343,8 +202,8 @@ def delete_state_item(name):
def get_setting(name):
_env_name = "PLATFORMIO_SETTING_%s" % name.upper()
if _env_name in environ:
return sanitize_setting(name, getenv(_env_name))
if _env_name in os.environ:
return sanitize_setting(name, os.getenv(_env_name))
with State() as state:
if "settings" in state and name in state["settings"]:
@ -380,31 +239,32 @@ def is_disabled_progressbar():
return any(
[
get_session_var("force_option"),
is_ci(),
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
proc.is_ci(),
os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
]
)
def get_cid():
# pylint: disable=import-outside-toplevel
from platformio.clients.http import fetch_remote_content
cid = get_state_item("cid")
if cid:
return cid
uid = None
if getenv("C9_UID"):
uid = getenv("C9_UID")
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
if os.getenv("C9_UID"):
uid = os.getenv("C9_UID")
elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
try:
uid = (
requests.get(
uid = json.loads(
fetch_remote_content(
"{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"),
api=os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")),
token=os.getenv("USER_TOKEN"),
)
)
.json()
.get("id")
)
).get("id")
except: # pylint: disable=bare-except
pass
if not uid:
@ -414,3 +274,32 @@ def get_cid():
if WINDOWS or os.getuid() > 0: # pylint: disable=no-member
set_state_item("cid", cid)
return cid
def get_user_agent():
data = [
"PlatformIO/%s" % __version__,
"CI/%d" % int(proc.is_ci()),
"Container/%d" % int(proc.is_container()),
]
if get_session_var("caller_id"):
data.append("Caller/%s" % get_session_var("caller_id"))
if os.getenv("PLATFORMIO_IDE"):
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
data.append("Python/%s" % platform.python_version())
data.append("Platform/%s" % platform.platform())
return " ".join(data)
def get_host_id():
h = hashlib.sha1(hashlib_encode_data(get_cid()))
try:
username = getpass.getuser()
h.update(hashlib_encode_data(username))
except: # pylint: disable=bare-except
pass
return h.hexdigest()
def get_host_name():
return str(socket.gethostname())[:255]


@ -28,9 +28,9 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from SCons.Script import Import # pylint: disable=import-error
from SCons.Script import Variables # pylint: disable=import-error
from platformio import fs
from platformio import compat, fs
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformBase
from platformio.platform.base import PlatformBase
from platformio.proc import get_pythonexe_path
from platformio.project.helpers import get_project_dir
@ -55,6 +55,7 @@ DEFAULT_ENV_OPTIONS = dict(
"c++",
"link",
"platformio",
"piotarget",
"pioplatform",
"pioproject",
"piomaxlen",
@ -77,6 +78,7 @@ DEFAULT_ENV_OPTIONS = dict(
PROGNAME="program",
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
PYTHONEXE=get_pythonexe_path(),
IDE_EXTRA_DATA={},
)
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
@ -120,6 +122,18 @@ env.Replace(
],
)
if (
compat.WINDOWS
and sys.version_info >= (3, 8)
and env["PROJECT_DIR"].startswith("\\\\")
):
click.secho(
"There is a known issue with Python 3.8+ and mapped network drives on "
"Windows.\nPlease downgrade Python to the latest 3.7. More details at:\n"
"https://github.com/platformio/platformio-core/issues/3417",
fg="yellow",
)
if env.subst("$BUILD_CACHE_DIR"):
if not isdir(env.subst("$BUILD_CACHE_DIR")):
makedirs(env.subst("$BUILD_CACHE_DIR"))
@ -147,7 +161,7 @@ env.LoadPioPlatform()
env.SConscriptChdir(0)
env.SConsignFile(
join("$BUILD_DIR", ".sconsign.py%d%d" % (sys.version_info[0], sys.version_info[1]))
join("$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]))
)
for item in env.GetExtraScripts("pre"):
@ -205,7 +219,7 @@ if "idedata" in COMMAND_LINE_TARGETS:
click.echo(
"\n%s\n"
% dump_json_to_unicode(
projenv.DumpIDEData() # pylint: disable=undefined-variable
projenv.DumpIDEData(env) # pylint: disable=undefined-variable
)
)
env.Exit(0)


@ -1,17 +1,24 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
# Copyright 2015 MongoDB Inc.
# Copyright 2020 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# http://www.apache.org/licenses/LICENSE-2.0
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# pylint: disable=unused-argument, protected-access, unused-variable, import-error
# Original: https://github.com/mongodb/mongo/blob/master/site_scons/site_tools/compilation_db.py


@ -20,49 +20,50 @@ from glob import glob
from SCons.Defaults import processDefines # pylint: disable=import-error
from platformio.compat import glob_escape
from platformio.managers.core import get_core_package_dir
from platformio.package.manager.core import get_core_package_dir
from platformio.proc import exec_command, where_is_program
def _dump_includes(env):
includes = []
includes = {}
for item in env.get("CPPPATH", []):
includes.append(env.subst(item))
includes["build"] = [
env.subst("$PROJECT_INCLUDE_DIR"),
env.subst("$PROJECT_SRC_DIR"),
]
includes["build"].extend(
[os.path.realpath(env.subst(item)) for item in env.get("CPPPATH", [])]
)
# installed libs
includes["compatlib"] = []
for lb in env.GetLibBuilders():
includes.extend(lb.get_include_dirs())
includes["compatlib"].extend(
[os.path.realpath(inc) for inc in lb.get_include_dirs()]
)
# includes from toolchains
p = env.PioPlatform()
for name in p.get_installed_packages():
if p.get_package_type(name) != "toolchain":
includes["toolchain"] = []
for pkg in p.get_installed_packages():
if p.get_package_type(pkg.metadata.name) != "toolchain":
continue
toolchain_dir = glob_escape(p.get_package_dir(name))
toolchain_dir = glob_escape(pkg.path)
toolchain_incglobs = [
os.path.join(toolchain_dir, "*", "include*"),
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
os.path.join(toolchain_dir, "lib", "gcc", "*", "*", "include*"),
os.path.join(toolchain_dir, "*", "include*"),
]
for g in toolchain_incglobs:
includes.extend(glob(g))
includes["toolchain"].extend([os.path.realpath(inc) for inc in glob(g)])
includes["unity"] = []
unity_dir = get_core_package_dir("tool-unity")
if unity_dir:
includes.append(unity_dir)
includes["unity"].append(unity_dir)
includes.extend([env.subst("$PROJECT_INCLUDE_DIR"), env.subst("$PROJECT_SRC_DIR")])
# remove duplicates
result = []
for item in includes:
item = os.path.realpath(item)
if item not in result:
result.append(item)
return result
return includes
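# A minimal sketch of the grouped structure _dump_includes() now returns;
# all paths below are hypothetical placeholders.
example_includes = {
    "build": ["/proj/include", "/proj/src"],
    "compatlib": ["/proj/.pio/libdeps/uno/SomeLib/src"],
    "toolchain": ["/home/user/.platformio/packages/toolchain-atmelavr/avr/include"],
    "unity": ["/home/user/.platformio/packages/tool-unity"],
}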
def _get_gcc_defines(env):
@ -92,7 +93,9 @@ def _dump_defines(env):
defines = []
# global symbols
for item in processDefines(env.get("CPPDEFINES", [])):
defines.append(env.subst(item).replace("\\", ""))
item = item.strip()
if item:
defines.append(env.subst(item).replace("\\", ""))
# special symbol for Atmel AVR MCU
if env["PIOPLATFORM"] == "atmelavr":
@ -142,7 +145,8 @@ def _escape_build_flag(flags):
return [flag if " " not in flag else '"%s"' % flag for flag in flags]
def DumpIDEData(env):
def DumpIDEData(env, globalenv):
""" env here is `projenv`"""
env["__escape_build_flag"] = _escape_build_flag
@ -158,19 +162,21 @@ def DumpIDEData(env):
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
"defines": _dump_defines(env),
"includes": _dump_includes(env),
"cc_flags": env.subst(LINTCCOM),
"cxx_flags": env.subst(LINTCXXCOM),
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path": env.subst("$PROG_PATH"),
"flash_extra_images": [
{"offset": item[0], "path": env.subst(item[1])}
for item in env.get("FLASH_EXTRA_IMAGES", [])
],
"svd_path": _get_svd_path(env),
"compiler_type": env.GetCompilerType(),
"targets": globalenv.DumpTargets(),
"extra": dict(
flash_images=[
{"offset": item[0], "path": env.subst(item[1])}
for item in env.get("FLASH_EXTRA_IMAGES", [])
]
),
}
data["extra"].update(env.get("IDE_EXTRA_DATA", {}))
env_ = env.Clone()
# https://github.com/platformio/platformio-atom-ide/issues/34


@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=no-member, no-self-use, unused-argument, too-many-lines
# pylint: disable=no-self-use, unused-argument, too-many-lines
# pylint: disable=too-many-instance-attributes, too-many-public-methods
# pylint: disable=assignment-from-no-return
from __future__ import absolute_import
@ -22,7 +23,6 @@ import io
import os
import re
import sys
from os.path import basename, commonprefix, isdir, isfile, join, realpath, sep
import click
import SCons.Scanner # pylint: disable=import-error
@ -32,12 +32,15 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from platformio import exception, fs, util
from platformio.builder.tools import platformio as piotool
from platformio.clients.http import InternetIsOffline
from platformio.compat import WINDOWS, hashlib_encode_data, string_types
from platformio.managers.lib import LibraryManager
from platformio.package.exception import UnknownPackageError
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manifest.parser import (
ManifestParserError,
ManifestParserFactory,
)
from platformio.package.meta import PackageItem
from platformio.project.options import ProjectOptions
@ -45,7 +48,7 @@ class LibBuilderFactory(object):
@staticmethod
def new(env, path, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
clsname = "UnknownLibBuilder"
if isfile(join(path, "library.json")):
if os.path.isfile(os.path.join(path, "library.json")):
clsname = "PlatformIOLibBuilder"
else:
used_frameworks = LibBuilderFactory.get_used_frameworks(env, path)
@ -62,12 +65,12 @@ class LibBuilderFactory(object):
@staticmethod
def get_used_frameworks(env, path):
if any(
isfile(join(path, fname))
os.path.isfile(os.path.join(path, fname))
for fname in ("library.properties", "keywords.txt")
):
return ["arduino"]
if isfile(join(path, "module.json")):
if os.path.isfile(os.path.join(path, "module.json")):
return ["mbed"]
include_re = re.compile(
@ -83,7 +86,7 @@ class LibBuilderFactory(object):
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
):
continue
with io.open(join(root, fname), errors="ignore") as fp:
with io.open(os.path.join(root, fname), errors="ignore") as fp:
content = fp.read()
if not content:
continue
@ -110,7 +113,7 @@ class LibBuilderBase(object):
def __init__(self, env, path, manifest=None, verbose=False):
self.env = env.Clone()
self.envorigin = env.Clone()
self.path = realpath(env.subst(path))
self.path = os.path.realpath(env.subst(path))
self.verbose = verbose
try:
@ -144,11 +147,11 @@ class LibBuilderBase(object):
p2 = p2.lower()
if p1 == p2:
return True
return commonprefix((p1 + sep, p2)) == p1 + sep
return os.path.commonprefix((p1 + os.path.sep, p2)) == p1 + os.path.sep
@property
def name(self):
return self._manifest.get("name", basename(self.path))
return self._manifest.get("name", os.path.basename(self.path))
@property
def version(self):
@ -169,13 +172,19 @@ class LibBuilderBase(object):
@property
def include_dir(self):
if not all(isdir(join(self.path, d)) for d in ("include", "src")):
if not all(
os.path.isdir(os.path.join(self.path, d)) for d in ("include", "src")
):
return None
return join(self.path, "include")
return os.path.join(self.path, "include")
@property
def src_dir(self):
return join(self.path, "src") if isdir(join(self.path, "src")) else self.path
return (
os.path.join(self.path, "src")
if os.path.isdir(os.path.join(self.path, "src"))
else self.path
)
def get_include_dirs(self):
items = []
@ -188,7 +197,9 @@ class LibBuilderBase(object):
@property
def build_dir(self):
lib_hash = hashlib.sha1(hashlib_encode_data(self.path)).hexdigest()[:3]
return join("$BUILD_DIR", "lib%s" % lib_hash, basename(self.path))
return os.path.join(
"$BUILD_DIR", "lib%s" % lib_hash, os.path.basename(self.path)
)
@property
def build_flags(self):
@ -267,7 +278,7 @@ class LibBuilderBase(object):
if self.extra_script:
self.env.SConscriptChdir(1)
self.env.SConscript(
realpath(self.extra_script),
os.path.realpath(self.extra_script),
exports={"env": self.env, "pio_lib_builder": self},
)
self.env.ProcessUnFlags(self.build_unflags)
@ -293,14 +304,14 @@ class LibBuilderBase(object):
def get_search_files(self):
items = [
join(self.src_dir, item)
os.path.join(self.src_dir, item)
for item in self.env.MatchSourceFiles(self.src_dir, self.src_filter)
]
include_dir = self.include_dir
if include_dir:
items.extend(
[
join(include_dir, item)
os.path.join(include_dir, item)
for item in self.env.MatchSourceFiles(include_dir)
]
)
@ -369,7 +380,7 @@ class LibBuilderBase(object):
continue
_f_part = _h_path[: _h_path.rindex(".")]
for ext in piotool.SRC_C_EXT + piotool.SRC_CXX_EXT:
if not isfile("%s.%s" % (_f_part, ext)):
if not os.path.isfile("%s.%s" % (_f_part, ext)):
continue
_c_path = self.env.File("%s.%s" % (_f_part, ext))
if _c_path not in result:
@ -463,23 +474,24 @@ class UnknownLibBuilder(LibBuilderBase):
class ArduinoLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest_path = join(self.path, "library.properties")
if not isfile(manifest_path):
manifest_path = os.path.join(self.path, "library.properties")
if not os.path.isfile(manifest_path):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
if isdir(join(self.path, "src")):
if os.path.isdir(os.path.join(self.path, "src")):
return include_dirs
if isdir(join(self.path, "utility")):
include_dirs.append(join(self.path, "utility"))
if os.path.isdir(os.path.join(self.path, "utility")):
include_dirs.append(os.path.join(self.path, "utility"))
return include_dirs
@property
def src_filter(self):
src_dir = join(self.path, "src")
if isdir(src_dir):
src_dir = os.path.join(self.path, "src")
if os.path.isdir(src_dir):
# pylint: disable=no-member
src_filter = LibBuilderBase.src_filter.fget(self)
for root, _, files in os.walk(src_dir, followlinks=True):
found = False
@ -490,50 +502,68 @@ class ArduinoLibBuilder(LibBuilderBase):
if not found:
continue
rel_path = root.replace(src_dir, "")
if rel_path.startswith(sep):
rel_path = rel_path[1:] + sep
if rel_path.startswith(os.path.sep):
rel_path = rel_path[1:] + os.path.sep
src_filter.append("-<%s*.[aA][sS][mM]>" % rel_path)
return src_filter
src_filter = []
is_utility = isdir(join(self.path, "utility"))
is_utility = os.path.isdir(os.path.join(self.path, "utility"))
for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
# arduino ide ignores files with .asm or .ASM extensions
if ext.lower() == "asm":
continue
src_filter.append("+<*.%s>" % ext)
if is_utility:
src_filter.append("+<utility%s*.%s>" % (sep, ext))
src_filter.append("+<utility%s*.%s>" % (os.path.sep, ext))
return src_filter
@property
def dependencies(self):
# do not include automatically all libraries for build
# chain+ will decide later
return None
@property
def lib_ldf_mode(self):
# pylint: disable=no-member
if not self._manifest.get("dependencies"):
return LibBuilderBase.lib_ldf_mode.fget(self)
missing = object()
global_value = self.env.GetProjectConfig().getraw(
"env:" + self.env["PIOENV"], "lib_ldf_mode", missing
)
if global_value != missing:
return LibBuilderBase.lib_ldf_mode.fget(self)
# automatically enable C++ Preprocessing in runtime
# (Arduino IDE has this behavior)
return "chain+"
def is_frameworks_compatible(self, frameworks):
return util.items_in_list(frameworks, ["arduino", "energia"])
def is_platforms_compatible(self, platforms):
items = self._manifest.get("platforms", [])
if not items:
return LibBuilderBase.is_platforms_compatible(self, platforms)
return util.items_in_list(platforms, items)
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
class MbedLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest_path = join(self.path, "module.json")
if not isfile(manifest_path):
manifest_path = os.path.join(self.path, "module.json")
if not os.path.isfile(manifest_path):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
@property
def include_dir(self):
if isdir(join(self.path, "include")):
return join(self.path, "include")
if os.path.isdir(os.path.join(self.path, "include")):
return os.path.join(self.path, "include")
return None
@property
def src_dir(self):
if isdir(join(self.path, "source")):
return join(self.path, "source")
return LibBuilderBase.src_dir.fget(self)
if os.path.isdir(os.path.join(self.path, "source")):
return os.path.join(self.path, "source")
return LibBuilderBase.src_dir.fget(self) # pylint: disable=no-member
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
@ -542,13 +572,13 @@ class MbedLibBuilder(LibBuilderBase):
# library with module.json
for p in self._manifest.get("extraIncludes", []):
include_dirs.append(join(self.path, p))
include_dirs.append(os.path.join(self.path, p))
# old mbed library without manifest, add to CPPPATH all folders
if not self._manifest:
for root, _, __ in os.walk(self.path):
part = root.replace(self.path, "").lower()
if any(s in part for s in ("%s." % sep, "test", "example")):
if any(s in part for s in ("%s." % os.path.sep, "test", "example")):
continue
if root not in include_dirs:
include_dirs.append(root)
@ -564,7 +594,7 @@ class MbedLibBuilder(LibBuilderBase):
def _process_mbed_lib_confs(self):
mbed_lib_paths = [
join(root, "mbed_lib.json")
os.path.join(root, "mbed_lib.json")
for root, _, files in os.walk(self.path)
if "mbed_lib.json" in files
]
@ -573,8 +603,8 @@ class MbedLibBuilder(LibBuilderBase):
mbed_config_path = None
for p in self.env.get("CPPPATH"):
mbed_config_path = join(self.env.subst(p), "mbed_config.h")
if isfile(mbed_config_path):
mbed_config_path = os.path.join(self.env.subst(p), "mbed_config.h")
if os.path.isfile(mbed_config_path):
break
mbed_config_path = None
if not mbed_config_path:
@ -666,30 +696,31 @@ class MbedLibBuilder(LibBuilderBase):
class PlatformIOLibBuilder(LibBuilderBase):
def load_manifest(self):
manifest_path = join(self.path, "library.json")
if not isfile(manifest_path):
manifest_path = os.path.join(self.path, "library.json")
if not os.path.isfile(manifest_path):
return {}
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
def _has_arduino_manifest(self):
return isfile(join(self.path, "library.properties"))
return os.path.isfile(os.path.join(self.path, "library.properties"))
@property
def include_dir(self):
if "includeDir" in self._manifest.get("build", {}):
with fs.cd(self.path):
return realpath(self._manifest.get("build").get("includeDir"))
return LibBuilderBase.include_dir.fget(self)
return os.path.realpath(self._manifest.get("build").get("includeDir"))
return LibBuilderBase.include_dir.fget(self) # pylint: disable=no-member
@property
def src_dir(self):
if "srcDir" in self._manifest.get("build", {}):
with fs.cd(self.path):
return realpath(self._manifest.get("build").get("srcDir"))
return LibBuilderBase.src_dir.fget(self)
return os.path.realpath(self._manifest.get("build").get("srcDir"))
return LibBuilderBase.src_dir.fget(self) # pylint: disable=no-member
@property
def src_filter(self):
# pylint: disable=no-member
if "srcFilter" in self._manifest.get("build", {}):
return self._manifest.get("build").get("srcFilter")
if self.env["SRC_FILTER"]:
@ -702,19 +733,19 @@ class PlatformIOLibBuilder(LibBuilderBase):
def build_flags(self):
if "flags" in self._manifest.get("build", {}):
return self._manifest.get("build").get("flags")
return LibBuilderBase.build_flags.fget(self)
return LibBuilderBase.build_flags.fget(self) # pylint: disable=no-member
@property
def build_unflags(self):
if "unflags" in self._manifest.get("build", {}):
return self._manifest.get("build").get("unflags")
return LibBuilderBase.build_unflags.fget(self)
return LibBuilderBase.build_unflags.fget(self) # pylint: disable=no-member
@property
def extra_script(self):
if "extraScript" in self._manifest.get("build", {}):
return self._manifest.get("build").get("extraScript")
return LibBuilderBase.extra_script.fget(self)
return LibBuilderBase.extra_script.fget(self) # pylint: disable=no-member
@property
def lib_archive(self):
@ -726,12 +757,14 @@ class PlatformIOLibBuilder(LibBuilderBase):
return self.env.GetProjectConfig().get(
"env:" + self.env["PIOENV"], "lib_archive"
)
# pylint: disable=no-member
return self._manifest.get("build", {}).get(
"libArchive", LibBuilderBase.lib_archive.fget(self)
)
@property
def lib_ldf_mode(self):
# pylint: disable=no-member
return self.validate_ldf_mode(
self._manifest.get("build", {}).get(
"libLDFMode", LibBuilderBase.lib_ldf_mode.fget(self)
@ -740,6 +773,7 @@ class PlatformIOLibBuilder(LibBuilderBase):
@property
def lib_compat_mode(self):
# pylint: disable=no-member
return self.validate_compat_mode(
self._manifest.get("build", {}).get(
"libCompatMode", LibBuilderBase.lib_compat_mode.fget(self)
@ -747,16 +781,10 @@ class PlatformIOLibBuilder(LibBuilderBase):
)
def is_platforms_compatible(self, platforms):
items = self._manifest.get("platforms")
if not items:
return LibBuilderBase.is_platforms_compatible(self, platforms)
return util.items_in_list(platforms, items)
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
def is_frameworks_compatible(self, frameworks):
items = self._manifest.get("frameworks")
if not items:
return LibBuilderBase.is_frameworks_compatible(self, frameworks)
return util.items_in_list(frameworks, items)
return util.items_in_list(frameworks, self._manifest.get("frameworks") or ["*"])
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
@ -765,10 +793,10 @@ class PlatformIOLibBuilder(LibBuilderBase):
if (
"build" not in self._manifest
and self._has_arduino_manifest()
and not isdir(join(self.path, "src"))
and isdir(join(self.path, "utility"))
and not os.path.isdir(os.path.join(self.path, "src"))
and os.path.isdir(os.path.join(self.path, "utility"))
):
include_dirs.append(join(self.path, "utility"))
include_dirs.append(os.path.join(self.path, "utility"))
for path in self.env.get("CPPPATH", []):
if path not in self.envorigin.get("CPPPATH", []):
@ -787,7 +815,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
@property
def include_dir(self):
include_dir = self.env.subst("$PROJECT_INCLUDE_DIR")
return include_dir if isdir(include_dir) else None
return include_dir if os.path.isdir(include_dir) else None
@property
def src_dir(self):
@ -796,7 +824,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
def get_include_dirs(self):
include_dirs = []
project_include_dir = self.env.subst("$PROJECT_INCLUDE_DIR")
if isdir(project_include_dir):
if os.path.isdir(project_include_dir):
include_dirs.append(project_include_dir)
for include_dir in LibBuilderBase.get_include_dirs(self):
if include_dir not in include_dirs:
@ -810,7 +838,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
if "__test" in COMMAND_LINE_TARGETS:
items.extend(
[
join("$PROJECT_TEST_DIR", item)
os.path.join("$PROJECT_TEST_DIR", item)
for item in self.env.MatchSourceFiles(
"$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
)
@ -820,7 +848,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
@property
def lib_ldf_mode(self):
mode = LibBuilderBase.lib_ldf_mode.fget(self)
mode = LibBuilderBase.lib_ldf_mode.fget(self) # pylint: disable=no-member
if not mode.startswith("chain"):
return mode
# parse all project files
@ -828,6 +856,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
@property
def src_filter(self):
# pylint: disable=no-member
return self.env.get("SRC_FILTER") or LibBuilderBase.src_filter.fget(self)
@property
@ -839,34 +868,36 @@ class ProjectAsLibBuilder(LibBuilderBase):
pass
def install_dependencies(self):
def _is_builtin(uri):
def _is_builtin(spec):
for lb in self.env.GetLibBuilders():
if lb.name == uri:
if lb.name == spec:
return True
return False
not_found_uri = []
for uri in self.dependencies:
not_found_specs = []
for spec in self.dependencies:
# check if built-in library
if _is_builtin(uri):
if _is_builtin(spec):
continue
found = False
for storage_dir in self.env.GetLibSourceDirs():
lm = LibraryManager(storage_dir)
if lm.get_package_dir(*lm.parse_pkg_uri(uri)):
lm = LibraryPackageManager(storage_dir)
if lm.get_package(spec):
found = True
break
if not found:
not_found_uri.append(uri)
not_found_specs.append(spec)
did_install = False
lm = LibraryManager(self.env.subst(join("$PROJECT_LIBDEPS_DIR", "$PIOENV")))
for uri in not_found_uri:
lm = LibraryPackageManager(
self.env.subst(os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"))
)
for spec in not_found_specs:
try:
lm.install(uri)
lm.install(spec)
did_install = True
except (exception.LibNotFound, exception.InternetIsOffline) as e:
except (UnknownPackageError, InternetIsOffline) as e:
click.secho("Warning! %s" % e, fg="yellow")
# reset cache
@ -874,17 +905,17 @@ class ProjectAsLibBuilder(LibBuilderBase):
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=None)
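# A minimal usage sketch of the LibraryPackageManager API used above; the
# storage path and library name are hypothetical examples.
from platformio.package.manager.library import LibraryPackageManager

lm = LibraryPackageManager("/proj/.pio/libdeps/uno")
pkg = lm.get_package("ArduinoJson")  # returns a package object or None
if not pkg:
    lm.install("ArduinoJson")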
def process_dependencies(self): # pylint: disable=too-many-branches
for uri in self.dependencies:
for spec in self.dependencies:
found = False
for storage_dir in self.env.GetLibSourceDirs():
if found:
break
lm = LibraryManager(storage_dir)
lib_dir = lm.get_package_dir(*lm.parse_pkg_uri(uri))
if not lib_dir:
lm = LibraryPackageManager(storage_dir)
pkg = lm.get_package(spec)
if not pkg:
continue
for lb in self.env.GetLibBuilders():
if lib_dir != lb.path:
if pkg.path != lb.path:
continue
if lb not in self.depbuilders:
self.depend_recursive(lb)
@ -896,7 +927,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
# look for built-in libraries by a name
# which don't have package manifest
for lb in self.env.GetLibBuilders():
if lb.name != uri:
if lb.name != spec:
continue
if lb not in self.depbuilders:
self.depend_recursive(lb)
@ -951,12 +982,12 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
found_incompat = False
for storage_dir in env.GetLibSourceDirs():
storage_dir = realpath(storage_dir)
if not isdir(storage_dir):
storage_dir = os.path.realpath(storage_dir)
if not os.path.isdir(storage_dir):
continue
for item in sorted(os.listdir(storage_dir)):
lib_dir = join(storage_dir, item)
if item == "__cores__" or not isdir(lib_dir):
lib_dir = os.path.join(storage_dir, item)
if item == "__cores__" or not os.path.isdir(lib_dir):
continue
try:
lb = LibBuilderFactory.new(env, lib_dir)
@ -988,10 +1019,6 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
def ConfigureProjectLibBuilder(env):
def _get_vcs_info(lb):
path = LibraryManager.get_src_manifest_path(lb.path)
return fs.load_json(path) if path else None
def _correct_found_libs(lib_builders):
# build full dependency graph
found_lbs = [lb for lb in lib_builders if lb.dependent]
@ -1007,15 +1034,15 @@ def ConfigureProjectLibBuilder(env):
margin = "| " * (level)
for lb in root.depbuilders:
title = "<%s>" % lb.name
vcs_info = _get_vcs_info(lb)
if lb.version:
pkg = PackageItem(lb.path)
if pkg.metadata:
title += " %s" % pkg.metadata.version
elif lb.version:
title += " %s" % lb.version
if vcs_info and vcs_info.get("version"):
title += " #%s" % vcs_info.get("version")
click.echo("%s|-- %s" % (margin, title), nl=False)
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
if vcs_info:
click.echo(" [%s]" % vcs_info.get("url"), nl=False)
if pkg.metadata and pkg.metadata.spec.external:
click.echo(" [%s]" % pkg.metadata.spec.url, nl=False)
click.echo(" (", nl=False)
click.echo(lb.path, nl=False)
click.echo(")", nl=False)
@ -1024,7 +1051,7 @@ def ConfigureProjectLibBuilder(env):
_print_deps_tree(lb, level + 1)
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project) # pylint: disable=no-member
click.echo("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
click.echo(


@ -16,19 +16,16 @@ from __future__ import absolute_import
import atexit
import io
import os
import re
import sys
from os import environ, remove, walk
from os.path import basename, isdir, isfile, join, realpath, relpath, sep
from tempfile import mkstemp
import click
from SCons.Action import Action # pylint: disable=import-error
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from platformio import fs, util
from platformio.compat import get_filesystem_encoding, get_locale_encoding, glob_escape
from platformio.managers.core import get_core_package_dir
from platformio.package.manager.core import get_core_package_dir
from platformio.proc import exec_command
@ -126,11 +123,11 @@ class InoToCPPConverter(object):
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
out_file, tmp_path
),
"Converting " + basename(out_file[:-4]),
"Converting " + os.path.basename(out_file[:-4]),
)
)
atexit.register(_delete_file, tmp_path)
return isfile(out_file)
return os.path.isfile(out_file)
def _join_multiline_strings(self, contents):
if "\\\n" not in contents:
@ -233,7 +230,9 @@ class InoToCPPConverter(object):
def ConvertInoToCpp(env):
src_dir = glob_escape(env.subst("$PROJECT_SRC_DIR"))
ino_nodes = env.Glob(join(src_dir, "*.ino")) + env.Glob(join(src_dir, "*.pde"))
ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
os.path.join(src_dir, "*.pde")
)
if not ino_nodes:
return
c = InoToCPPConverter(env)
@ -244,8 +243,8 @@ def ConvertInoToCpp(env):
def _delete_file(path):
try:
if isfile(path):
remove(path)
if os.path.isfile(path):
os.remove(path)
except: # pylint: disable=bare-except
pass
@ -255,7 +254,7 @@ def _get_compiler_type(env):
if env.subst("$CC").endswith("-gcc"):
return "gcc"
try:
sysenv = environ.copy()
sysenv = os.environ.copy()
sysenv["PATH"] = str(env["ENV"]["PATH"])
result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
except OSError:
@ -277,8 +276,8 @@ def GetCompilerType(env):
def GetActualLDScript(env):
def _lookup_in_ldpath(script):
for d in env.get("LIBPATH", []):
path = join(env.subst(d), script)
if isfile(path):
path = os.path.join(env.subst(d), script)
if os.path.isfile(path):
return path
return None
@ -297,7 +296,7 @@ def GetActualLDScript(env):
else:
continue
script = env.subst(raw_script.replace('"', "").strip())
if isfile(script):
if os.path.isfile(script):
return script
path = _lookup_in_ldpath(script)
if path:
@ -319,29 +318,6 @@ def GetActualLDScript(env):
env.Exit(1)
def VerboseAction(_, act, actstr):
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
return act
return Action(act, actstr)
def PioClean(env, clean_dir):
if not isdir(clean_dir):
print("Build environment is clean")
env.Exit(0)
clean_rel_path = relpath(clean_dir)
for root, _, files in walk(clean_dir):
for f in files:
dst = join(root, f)
remove(dst)
print(
"Removed %s" % (dst if clean_rel_path.startswith(".") else relpath(dst))
)
print("Done cleaning")
fs.rmtree(clean_dir)
env.Exit(0)
def ConfigureDebugFlags(env):
def _cleanup_debug_flags(scope):
if scope not in env:
@ -370,16 +346,16 @@ def ConfigureDebugFlags(env):
def ConfigureTestTarget(env):
env.Append(
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")],
CPPPATH=[os.path.join("$BUILD_DIR", "UnityTestLib")],
)
unitylib = env.BuildLibrary(
join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity")
os.path.join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity")
)
env.Prepend(LIBS=[unitylib])
src_filter = ["+<*.cpp>", "+<*.c>"]
if "PIOTEST_RUNNING_NAME" in env:
src_filter.append("+<%s%s>" % (env["PIOTEST_RUNNING_NAME"], sep))
src_filter.append("+<%s%s>" % (env["PIOTEST_RUNNING_NAME"], os.path.sep))
env.Replace(PIOTEST_SRC_FILTER=src_filter)
@ -393,7 +369,7 @@ def GetExtraScripts(env, scope):
if not items:
return items
with fs.cd(env.subst("$PROJECT_DIR")):
return [realpath(item) for item in items]
return [os.path.realpath(item) for item in items]
def exists(_):
@ -404,8 +380,6 @@ def generate(env):
env.AddMethod(ConvertInoToCpp)
env.AddMethod(GetCompilerType)
env.AddMethod(GetActualLDScript)
env.AddMethod(VerboseAction)
env.AddMethod(PioClean)
env.AddMethod(ConfigureDebugFlags)
env.AddMethod(ConfigureTestTarget)
env.AddMethod(GetExtraScripts)


@ -14,15 +14,18 @@
from __future__ import absolute_import
import os
import sys
from os.path import isdir, isfile, join
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
from platformio import exception, fs, util
from platformio import fs, util
from platformio.compat import WINDOWS
from platformio.managers.platform import PlatformFactory
from platformio.package.meta import PackageItem
from platformio.package.version import get_original_version
from platformio.platform.exception import UnknownBoard
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectOptions
# pylint: disable=too-many-branches, too-many-locals
@ -34,7 +37,7 @@ def PioPlatform(env):
if "framework" in variables:
# support PIO Core 3.0 dev/platforms
variables["pioframework"] = variables["framework"]
p = PlatformFactory.newPlatform(env["PLATFORM_MANIFEST"])
p = PlatformFactory.new(os.path.dirname(env["PLATFORM_MANIFEST"]))
p.configure_default_packages(variables, COMMAND_LINE_TARGETS)
return p
@ -46,7 +49,7 @@ def BoardConfig(env, board=None):
board = board or env.get("BOARD")
assert board, "BoardConfig: Board is not defined"
return p.board_config(board)
except (AssertionError, exception.UnknownBoard) as e:
except (AssertionError, UnknownBoard) as e:
sys.stderr.write("Error: %s\n" % str(e))
env.Exit(1)
@ -55,37 +58,42 @@ def GetFrameworkScript(env, framework):
p = env.PioPlatform()
assert p.frameworks and framework in p.frameworks
script_path = env.subst(p.frameworks[framework]["script"])
if not isfile(script_path):
script_path = join(p.get_dir(), script_path)
if not os.path.isfile(script_path):
script_path = os.path.join(p.get_dir(), script_path)
return script_path
def LoadPioPlatform(env):
p = env.PioPlatform()
installed_packages = p.get_installed_packages()
# Ensure real platform name
env["PIOPLATFORM"] = p.name
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
systype = util.get_systype()
for name in installed_packages:
type_ = p.get_package_type(name)
for pkg in p.get_installed_packages():
type_ = p.get_package_type(pkg.metadata.name)
if type_ not in ("toolchain", "uploader", "debugger"):
continue
pkg_dir = p.get_package_dir(name)
env.PrependENVPath(
"PATH", join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir
"PATH",
os.path.join(pkg.path, "bin")
if os.path.isdir(os.path.join(pkg.path, "bin"))
else pkg.path,
)
if not WINDOWS and isdir(join(pkg_dir, "lib")) and type_ != "toolchain":
if (
not WINDOWS
and os.path.isdir(os.path.join(pkg.path, "lib"))
and type_ != "toolchain"
):
env.PrependENVPath(
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH",
join(pkg_dir, "lib"),
os.path.join(pkg.path, "lib"),
)
# Platform specific LD Scripts
if isdir(join(p.get_dir(), "ldscripts")):
env.Prepend(LIBPATH=[join(p.get_dir(), "ldscripts")])
if os.path.isdir(os.path.join(p.get_dir(), "ldscripts")):
env.Prepend(LIBPATH=[os.path.join(p.get_dir(), "ldscripts")])
if "BOARD" not in env:
return
@ -125,6 +133,7 @@ def LoadPioPlatform(env):
def PrintConfiguration(env): # pylint: disable=too-many-statements
platform = env.PioPlatform()
pkg_metadata = PackageItem(platform.get_dir()).metadata
board_config = env.BoardConfig() if "BOARD" in env else None
def _get_configuration_data():
@ -139,14 +148,19 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
)
def _get_plaform_data():
data = ["PLATFORM: %s %s" % (platform.title, platform.version)]
src_manifest_path = platform.pm.get_src_manifest_path(platform.get_dir())
if src_manifest_path:
src_manifest = fs.load_json(src_manifest_path)
if "version" in src_manifest:
data.append("#" + src_manifest["version"])
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
data.append("(%s)" % src_manifest["url"])
data = [
"PLATFORM: %s (%s)"
% (
platform.title,
pkg_metadata.version if pkg_metadata else platform.version,
)
]
if (
int(ARGUMENTS.get("PIOVERBOSE", 0))
and pkg_metadata
and pkg_metadata.spec.external
):
data.append("(%s)" % pkg_metadata.spec.url)
if board_config:
data.extend([">", board_config.get("name")])
return data
@ -165,7 +179,8 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
ram = board_config.get("upload", {}).get("maximum_ram_size")
flash = board_config.get("upload", {}).get("maximum_size")
data.append(
"%s RAM, %s Flash" % (fs.format_filesize(ram), fs.format_filesize(flash))
"%s RAM, %s Flash"
% (fs.humanize_file_size(ram), fs.humanize_file_size(flash))
)
return data
@ -196,20 +211,14 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
def _get_packages_data():
data = []
for name, options in platform.packages.items():
if options.get("optional"):
continue
pkg_dir = platform.get_package_dir(name)
if not pkg_dir:
continue
manifest = platform.pm.load_manifest(pkg_dir)
original_version = util.get_original_version(manifest["version"])
info = "%s %s" % (manifest["name"], manifest["version"])
for item in platform.dump_used_packages():
original_version = get_original_version(item["version"])
info = "%s %s" % (item["name"], item["version"])
extra = []
if original_version:
extra.append(original_version)
if "__src_url" in manifest and int(ARGUMENTS.get("PIOVERBOSE", 0)):
extra.append(manifest["__src_url"])
if "src_url" in item and int(ARGUMENTS.get("PIOVERBOSE", 0)):
extra.append(item["src_url"])
if extra:
info += " (%s)" % ", ".join(extra)
data.append(info)


@ -0,0 +1,119 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
from SCons.Action import Action # pylint: disable=import-error
from SCons.Script import ARGUMENTS # pylint: disable=import-error
from SCons.Script import AlwaysBuild # pylint: disable=import-error
from platformio import compat, fs
def VerboseAction(_, act, actstr):
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
return act
return Action(act, actstr)
def PioClean(env, clean_dir):
def _relpath(path):
if compat.WINDOWS:
prefix = os.getcwd()[:2].lower()
if (
":" not in prefix
or not path.lower().startswith(prefix)
or os.path.relpath(path).startswith("..")
):
return path
return os.path.relpath(path)
if not os.path.isdir(clean_dir):
print("Build environment is clean")
env.Exit(0)
clean_rel_path = _relpath(clean_dir)
for root, _, files in os.walk(clean_dir):
for f in files:
dst = os.path.join(root, f)
os.remove(dst)
print(
"Removed %s"
% (dst if not clean_rel_path.startswith(".") else _relpath(dst))
)
print("Done cleaning")
fs.rmtree(clean_dir)
env.Exit(0)
def AddTarget( # pylint: disable=too-many-arguments
env,
name,
dependencies,
actions,
title=None,
description=None,
group="Generic",
always_build=True,
):
if "__PIO_TARGETS" not in env:
env["__PIO_TARGETS"] = {}
assert name not in env["__PIO_TARGETS"]
env["__PIO_TARGETS"][name] = dict(
name=name, title=title, description=description, group=group
)
target = env.Alias(name, dependencies, actions)
if always_build:
AlwaysBuild(target)
return target
def AddPlatformTarget(env, *args, **kwargs):
return env.AddTarget(group="Platform", *args, **kwargs)
def AddCustomTarget(env, *args, **kwargs):
return env.AddTarget(group="Custom", *args, **kwargs)
def DumpTargets(env):
targets = env.get("__PIO_TARGETS") or {}
# pre-fill default targets if embedded dev-platform
if env.PioPlatform().is_embedded() and not any(
t["group"] == "Platform" for t in targets.values()
):
targets["upload"] = dict(name="upload", group="Platform", title="Upload")
targets["compiledb"] = dict(
name="compiledb",
title="Compilation Database",
description="Generate compilation database `compile_commands.json`",
group="Advanced",
)
targets["clean"] = dict(name="clean", title="Clean", group="Generic")
return list(targets.values())
def exists(_):
return True
def generate(env):
env.AddMethod(VerboseAction)
env.AddMethod(PioClean)
env.AddMethod(AddTarget)
env.AddMethod(AddPlatformTarget)
env.AddMethod(AddCustomTarget)
env.AddMethod(DumpTargets)
return env
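# A minimal sketch of registering a target with the helpers above, e.g. from
# a dev-platform build script or "extra_scripts" entry; the target name,
# action and metadata are hypothetical examples.
Import("env")  # assumes the usual SCons/PlatformIO script context

env.AddCustomTarget(
    name="erase",
    dependencies=None,
    actions=["$UPLOADER --erase-all"],
    title="Erase Flash",
    description="Erase the whole flash memory",
)
# DumpTargets() would then list this entry in the "Custom" group.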


@ -26,9 +26,9 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
from SCons.Script import Export # pylint: disable=import-error
from SCons.Script import SConscript # pylint: disable=import-error
from platformio import fs
from platformio.compat import string_types
from platformio.util import pioversion_to_intstr
from platformio import __version__, fs
from platformio.compat import MACOS, string_types
from platformio.package.version import pepver_to_semver
SRC_HEADER_EXT = ["h", "hpp"]
SRC_ASM_EXT = ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
@ -66,7 +66,11 @@ def BuildProgram(env):
env.Prepend(LINKFLAGS=["-T", env.subst("$LDSCRIPT_PATH")])
# enable "cyclic reference" for linker
if env.get("LIBS") and env.GetCompilerType() == "gcc":
if (
env.get("LIBS")
and env.GetCompilerType() == "gcc"
and (env.PioPlatform().is_embedded() or not MACOS)
):
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
env.Append(_LIBFLAGS=" -Wl,--end-group")
@ -90,11 +94,16 @@ def BuildProgram(env):
def ProcessProgramDeps(env):
def _append_pio_macros():
core_version = pepver_to_semver(__version__)
env.AppendUnique(
CPPDEFINES=[
(
"PLATFORMIO",
int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())),
int(
"{0:02d}{1:02d}{2:02d}".format(
core_version.major, core_version.minor, core_version.patch
)
),
)
]
)
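# For example, with core version 5.0.2 the PLATFORMIO macro defined above
# evaluates to 50002:
platformio_macro = int("{0:02d}{1:02d}{2:02d}".format(5, 0, 2))  # -> 50002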
@ -282,18 +291,21 @@ def CollectBuildFiles(
if fs.path_endswith_ext(item, SRC_BUILD_EXT):
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
for callback, pattern in env.get("__PIO_BUILD_MIDDLEWARES", []):
tmp = []
for node in sources:
if pattern and not fnmatch.fnmatch(node.get_path(), pattern):
tmp.append(node)
continue
n = callback(node)
if n:
tmp.append(n)
sources = tmp
middlewares = env.get("__PIO_BUILD_MIDDLEWARES")
if not middlewares:
return sources
return sources
new_sources = []
for node in sources:
new_node = node
for callback, pattern in middlewares:
if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
continue
new_node = callback(new_node)
if new_node:
new_sources.append(new_node)
return new_sources
def AddBuildMiddleware(env, callback, pattern=None):
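# A minimal sketch of feeding the middleware chain applied above from an
# extra script; the callback and glob pattern are hypothetical examples.
Import("env")  # assumes the usual SCons/PlatformIO script context

def skip_node(node):
    # returning None (a falsy value) drops the node from the build
    return None

env.AddBuildMiddleware(skip_node, "*/unwanted/*.c")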

platformio/cache.py (new file, 165 lines)

@ -0,0 +1,165 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import hashlib
import os
from time import time
from platformio import app, fs
from platformio.compat import hashlib_encode_data
from platformio.package.lockfile import LockFile
from platformio.project.helpers import get_project_cache_dir
class ContentCache(object):
def __init__(self, namespace=None):
self.cache_dir = os.path.join(get_project_cache_dir(), namespace or "content")
self._db_path = os.path.join(self.cache_dir, "db.data")
self._lockfile = None
if not os.path.isdir(self.cache_dir):
os.makedirs(self.cache_dir)
def __enter__(self):
# cleanup obsolete items
self.delete()
return self
def __exit__(self, type_, value, traceback):
pass
@staticmethod
def key_from_args(*args):
h = hashlib.sha1()
for arg in args:
if arg:
h.update(hashlib_encode_data(arg))
return h.hexdigest()
def get_cache_path(self, key):
assert "/" not in key and "\\" not in key
key = str(key)
assert len(key) > 3
return os.path.join(self.cache_dir, key)
def get(self, key):
cache_path = self.get_cache_path(key)
if not os.path.isfile(cache_path):
return None
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
return fp.read()
def set(self, key, data, valid):
if not app.get_setting("enable_cache"):
return False
cache_path = self.get_cache_path(key)
if os.path.isfile(cache_path):
self.delete(key)
if not data:
return False
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert valid.endswith(tuple(tdmap))
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
if not self._lock_dbindex():
return False
if not os.path.isdir(os.path.dirname(cache_path)):
os.makedirs(os.path.dirname(cache_path))
try:
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
except UnicodeError:
if os.path.isfile(cache_path):
try:
os.remove(cache_path)
except OSError:
pass
return self._unlock_dbindex()
def delete(self, keys=None):
""" Keys=None, delete expired items """
if not os.path.isfile(self._db_path):
return None
if not keys:
keys = []
if not isinstance(keys, list):
keys = [keys]
paths_for_delete = [self.get_cache_path(k) for k in keys]
found = False
newlines = []
with open(self._db_path) as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
continue
expire, fname = line.split("=")
path = os.path.join(self.cache_dir, fname)
try:
if (
time() < int(expire)
and os.path.isfile(path)
and path not in paths_for_delete
):
newlines.append(line)
continue
except ValueError:
pass
found = True
if os.path.isfile(path):
try:
os.remove(path)
if not os.listdir(os.path.dirname(path)):
fs.rmtree(os.path.dirname(path))
except OSError:
pass
if found and self._lock_dbindex():
with open(self._db_path, "w") as fp:
fp.write("\n".join(newlines) + "\n")
self._unlock_dbindex()
return True
def clean(self):
if not os.path.isdir(self.cache_dir):
return
fs.rmtree(self.cache_dir)
def _lock_dbindex(self):
self._lockfile = LockFile(self.cache_dir)
try:
self._lockfile.acquire()
except: # pylint: disable=bare-except
return False
return True
def _unlock_dbindex(self):
if self._lockfile:
self._lockfile.release()
return True
#
# Helpers
#
def cleanup_content_cache(namespace=None):
with ContentCache(namespace) as cc:
cc.clean()
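# A minimal usage sketch of the new ContentCache; the namespace, key
# arguments and payload are hypothetical examples.
from platformio.cache import ContentCache

with ContentCache("http") as cc:
    key = cc.key_from_args("GET", "https://example.com/api")
    if cc.get(key) is None:
        cc.set(key, "cached payload", valid="1d")  # keep for one day
    print(cc.get(key))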


@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@ -0,0 +1,326 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
from platformio import __accounts_api__, app
from platformio.clients.http import HTTPClient
from platformio.exception import PlatformioException
class AccountError(PlatformioException):
MESSAGE = "{0}"
class AccountNotAuthorized(AccountError):
MESSAGE = "You are not authorized! Please log in to PlatformIO Account."
class AccountAlreadyAuthorized(AccountError):
MESSAGE = "You are already authorized with {0} account."
class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7
def __init__(self):
super(AccountClient, self).__init__(__accounts_api__)
@staticmethod
def get_refresh_token():
try:
return app.get_state_item("account").get("auth").get("refresh_token")
except: # pylint:disable=bare-except
raise AccountNotAuthorized()
@staticmethod
def delete_local_session():
app.delete_state_item("account")
@staticmethod
def delete_local_state(key):
account = app.get_state_item("account")
if not account or key not in account:
return
del account[key]
app.set_state_item("account", account)
def send_auth_request(self, *args, **kwargs):
headers = kwargs.get("headers", {})
if "Authorization" not in headers:
token = self.fetch_authentication_token()
headers["Authorization"] = "Bearer %s" % token
kwargs["headers"] = headers
return self.fetch_json_data(*args, **kwargs)
def login(self, username, password):
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise AccountAlreadyAuthorized(
app.get_state_item("account", {}).get("email", "")
)
data = self.fetch_json_data(
"post",
"/v1/login",
data={"username": username, "password": password},
)
app.set_state_item("account", data)
return data
def login_with_code(self, client_id, code, redirect_uri):
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise AccountAlreadyAuthorized(
app.get_state_item("account", {}).get("email", "")
)
result = self.fetch_json_data(
"post",
"/v1/login/code",
data={"client_id": client_id, "code": code, "redirect_uri": redirect_uri},
)
app.set_state_item("account", result)
return result
def logout(self):
refresh_token = self.get_refresh_token()
self.delete_local_session()
try:
self.fetch_json_data(
"post",
"/v1/logout",
data={"refresh_token": refresh_token},
)
except AccountError:
pass
return True
def change_password(self, old_password, new_password):
return self.send_auth_request(
"post",
"/v1/password",
data={"old_password": old_password, "new_password": new_password},
)
def registration(
self, username, email, password, firstname, lastname
): # pylint:disable=too-many-arguments
try:
self.fetch_authentication_token()
except: # pylint:disable=bare-except
pass
else:
raise AccountAlreadyAuthorized(
app.get_state_item("account", {}).get("email", "")
)
return self.fetch_json_data(
"post",
"/v1/registration",
data={
"username": username,
"email": email,
"password": password,
"firstname": firstname,
"lastname": lastname,
},
)
def auth_token(self, password, regenerate):
return self.send_auth_request(
"post",
"/v1/token",
data={"password": password, "regenerate": 1 if regenerate else 0},
).get("auth_token")
def forgot_password(self, username):
return self.fetch_json_data(
"post",
"/v1/forgot",
data={"username": username},
)
def get_profile(self):
return self.send_auth_request(
"get",
"/v1/profile",
)
def update_profile(self, profile, current_password):
profile["current_password"] = current_password
self.delete_local_state("summary")
response = self.send_auth_request(
"put",
"/v1/profile",
data=profile,
)
return response
def get_account_info(self, offline=False):
account = app.get_state_item("account") or {}
if (
account.get("summary")
and account["summary"].get("expire_at", 0) > time.time()
):
return account["summary"]
if offline and account.get("email"):
return {
"profile": {
"email": account.get("email"),
"username": account.get("username"),
}
}
result = self.send_auth_request(
"get",
"/v1/summary",
)
account["summary"] = dict(
profile=result.get("profile"),
packages=result.get("packages"),
subscriptions=result.get("subscriptions"),
user_id=result.get("user_id"),
expire_at=int(time.time()) + self.SUMMARY_CACHE_TTL,
)
app.set_state_item("account", account)
return result
def destroy_account(self):
return self.send_auth_request("delete", "/v1/account")
def create_org(self, orgname, email, displayname):
return self.send_auth_request(
"post",
"/v1/orgs",
data={"orgname": orgname, "email": email, "displayname": displayname},
)
def get_org(self, orgname):
return self.send_auth_request("get", "/v1/orgs/%s" % orgname)
def list_orgs(self):
return self.send_auth_request(
"get",
"/v1/orgs",
)
def update_org(self, orgname, data):
return self.send_auth_request(
"put", "/v1/orgs/%s" % orgname, data={k: v for k, v in data.items() if v}
)
def destroy_org(self, orgname):
return self.send_auth_request(
"delete",
"/v1/orgs/%s" % orgname,
)
def add_org_owner(self, orgname, username):
return self.send_auth_request(
"post",
"/v1/orgs/%s/owners" % orgname,
data={"username": username},
)
def list_org_owners(self, orgname):
return self.send_auth_request(
"get",
"/v1/orgs/%s/owners" % orgname,
)
def remove_org_owner(self, orgname, username):
return self.send_auth_request(
"delete",
"/v1/orgs/%s/owners" % orgname,
data={"username": username},
)
def create_team(self, orgname, teamname, description):
return self.send_auth_request(
"post",
"/v1/orgs/%s/teams" % orgname,
data={"name": teamname, "description": description},
)
def destroy_team(self, orgname, teamname):
return self.send_auth_request(
"delete",
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
)
def get_team(self, orgname, teamname):
return self.send_auth_request(
"get",
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
)
def list_teams(self, orgname):
return self.send_auth_request(
"get",
"/v1/orgs/%s/teams" % orgname,
)
def update_team(self, orgname, teamname, data):
return self.send_auth_request(
"put",
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
data={k: v for k, v in data.items() if v},
)
def add_team_member(self, orgname, teamname, username):
return self.send_auth_request(
"post",
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
data={"username": username},
)
def remove_team_member(self, orgname, teamname, username):
return self.send_auth_request(
"delete",
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
data={"username": username},
)
def fetch_authentication_token(self):
if os.environ.get("PLATFORMIO_AUTH_TOKEN"):
return os.environ.get("PLATFORMIO_AUTH_TOKEN")
auth = app.get_state_item("account", {}).get("auth", {})
if auth.get("access_token") and auth.get("access_token_expire"):
if auth.get("access_token_expire") > time.time():
return auth.get("access_token")
if auth.get("refresh_token"):
try:
data = self.fetch_json_data(
"post",
"/v1/login",
headers={
"Authorization": "Bearer %s" % auth.get("refresh_token")
},
)
app.set_state_item("account", data)
return data.get("auth").get("access_token")
except AccountError:
self.delete_local_session()
raise AccountNotAuthorized()
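A short sketch of the client above in use (the credentials are hypothetical and a reachable PlatformIO account service is assumed):

from platformio.clients.account import AccountClient, AccountNotAuthorized

client = AccountClient()
try:
    token = client.fetch_authentication_token()
except AccountNotAuthorized:
    # Hypothetical credentials, for illustration only
    client.login("example-user", "example-password")
    token = client.fetch_authentication_token()

# The summary is cached locally for up to SUMMARY_CACHE_TTL seconds
info = client.get_account_info(offline=False)
print(info.get("profile", {}).get("username"))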

205
platformio/clients/http.py Normal file
View File

@ -0,0 +1,205 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import math
import os
import socket
import requests.adapters
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error
from platformio import __check_internet_hosts__, __default_requests_timeout__, app, util
from platformio.cache import ContentCache
from platformio.exception import PlatformioException, UserSideException
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
class HTTPClientError(PlatformioException):
def __init__(self, message, response=None):
super(HTTPClientError, self).__init__()
self.message = message
self.response = response
def __str__(self): # pragma: no cover
return self.message
class InternetIsOffline(UserSideException):
MESSAGE = (
"You are not connected to the Internet.\n"
"PlatformIO needs the Internet connection to"
" download dependent packages or to work with PlatformIO Account."
)
class EndpointSession(requests.Session):
def __init__(self, base_url, *args, **kwargs):
super(EndpointSession, self).__init__(*args, **kwargs)
self.base_url = base_url
def request( # pylint: disable=signature-differs,arguments-differ
self, method, url, *args, **kwargs
):
# print(self.base_url, method, url, args, kwargs)
return super(EndpointSession, self).request(
method, urljoin(self.base_url, url), *args, **kwargs
)
class EndpointSessionIterator(object):
def __init__(self, endpoints):
if not isinstance(endpoints, list):
endpoints = [endpoints]
self.endpoints = endpoints
self.endpoints_iter = iter(endpoints)
self.retry = Retry(
total=math.ceil(6 / len(self.endpoints)),
backoff_factor=1,
# method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
status_forcelist=[413, 429, 500, 502, 503, 504],
)
def __iter__(self): # pylint: disable=non-iterator-returned
return self
def next(self):
""" For Python 2 compatibility """
return self.__next__()
def __next__(self):
base_url = next(self.endpoints_iter)
session = EndpointSession(base_url)
session.headers.update({"User-Agent": app.get_user_agent()})
adapter = requests.adapters.HTTPAdapter(max_retries=self.retry)
session.mount(base_url, adapter)
return session
class HTTPClient(object):
def __init__(self, endpoints):
self._session_iter = EndpointSessionIterator(endpoints)
self._session = None
self._next_session()
def __del__(self):
if not self._session:
return
self._session.close()
self._session = None
def _next_session(self):
if self._session:
self._session.close()
self._session = next(self._session_iter)
@util.throttle(500)
def send_request(self, method, path, **kwargs):
# Check the Internet connection first to avoid hanging on the 60-second request timeout
ensure_internet_on(raise_exception=True)
# set default timeout
if "timeout" not in kwargs:
kwargs["timeout"] = __default_requests_timeout__
while True:
try:
return getattr(self._session, method)(path, **kwargs)
except (
requests.exceptions.ConnectionError,
requests.exceptions.Timeout,
) as e:
try:
self._next_session()
except: # pylint: disable=bare-except
raise HTTPClientError(str(e))
def fetch_json_data(self, method, path, **kwargs):
cache_valid = kwargs.pop("cache_valid") if "cache_valid" in kwargs else None
if not cache_valid:
return self._parse_json_response(self.send_request(method, path, **kwargs))
cache_key = ContentCache.key_from_args(
method, path, kwargs.get("params"), kwargs.get("data")
)
with ContentCache("http") as cc:
result = cc.get(cache_key)
if result is not None:
return json.loads(result)
response = self.send_request(method, path, **kwargs)
data = self._parse_json_response(response)
cc.set(cache_key, response.text, cache_valid)
return data
@staticmethod
def _parse_json_response(response, expected_codes=(200, 201, 202)):
if response.status_code in expected_codes:
try:
return response.json()
except ValueError:
pass
try:
message = response.json()["message"]
except (KeyError, ValueError):
message = response.text
raise HTTPClientError(message, response)
#
# Helpers
#
@util.memoized(expire="10s")
def _internet_on():
timeout = 2
socket.setdefaulttimeout(timeout)
for host in __check_internet_hosts__:
try:
for var in ("HTTP_PROXY", "HTTPS_PROXY"):
if not os.getenv(var) and not os.getenv(var.lower()):
continue
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
return True
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, 80))
s.close()
return True
except: # pylint: disable=bare-except
pass
return False
def ensure_internet_on(raise_exception=False):
result = _internet_on()
if raise_exception and not result:
raise InternetIsOffline()
return result
def fetch_remote_content(*args, **kwargs):
kwargs["headers"] = kwargs.get("headers", {})
if "User-Agent" not in kwargs["headers"]:
kwargs["headers"]["User-Agent"] = app.get_user_agent()
if "timeout" not in kwargs:
kwargs["timeout"] = __default_requests_timeout__
r = requests.get(*args, **kwargs)
r.raise_for_status()
return r.text
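A sketch of how a concrete client builds on HTTPClient (the endpoint URLs here are placeholders, not real PlatformIO services):

from platformio.clients.http import HTTPClient, ensure_internet_on

class ExampleClient(HTTPClient):
    def __init__(self):
        # On connection errors HTTPClient rotates to the next endpoint in this list
        super(ExampleClient, self).__init__(
            ["https://api.example.com", "https://api-fallback.example.com"]
        )

if ensure_internet_on():
    client = ExampleClient()
    # JSON responses are cached on disk via ContentCache("http") for one hour
    data = client.fetch_json_data("get", "/v1/status", cache_valid="1h")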

View File

@ -0,0 +1,147 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio import __registry_api__, fs
from platformio.clients.account import AccountClient
from platformio.clients.http import HTTPClient, HTTPClientError
from platformio.package.meta import PackageType
# pylint: disable=too-many-arguments
class RegistryClient(HTTPClient):
def __init__(self):
super(RegistryClient, self).__init__(__registry_api__)
def send_auth_request(self, *args, **kwargs):
headers = kwargs.get("headers", {})
if "Authorization" not in headers:
token = AccountClient().fetch_authentication_token()
headers["Authorization"] = "Bearer %s" % token
kwargs["headers"] = headers
return self.fetch_json_data(*args, **kwargs)
def publish_package(
self, archive_path, owner=None, released_at=None, private=False, notify=True
):
account = AccountClient()
if not owner:
owner = (
account.get_account_info(offline=True).get("profile").get("username")
)
with open(archive_path, "rb") as fp:
return self.send_auth_request(
"post",
"/v3/packages/%s/%s" % (owner, PackageType.from_archive(archive_path)),
params={
"private": 1 if private else 0,
"notify": 1 if notify else 0,
"released_at": released_at,
},
headers={
"Content-Type": "application/octet-stream",
"X-PIO-Content-SHA256": fs.calculate_file_hashsum(
"sha256", archive_path
),
},
data=fp,
)
def unpublish_package( # pylint: disable=redefined-builtin
self, type, name, owner=None, version=None, undo=False
):
account = AccountClient()
if not owner:
owner = (
account.get_account_info(offline=True).get("profile").get("username")
)
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
if version:
path += "/" + version
return self.send_auth_request(
"delete",
path,
params={"undo": 1 if undo else 0},
)
def update_resource(self, urn, private):
return self.send_auth_request(
"put",
"/v3/resources/%s" % urn,
data={"private": int(private)},
)
def grant_access_for_resource(self, urn, client, level):
return self.send_auth_request(
"put",
"/v3/resources/%s/access" % urn,
data={"client": client, "level": level},
)
def revoke_access_from_resource(self, urn, client):
return self.send_auth_request(
"delete",
"/v3/resources/%s/access" % urn,
data={"client": client},
)
def list_resources(self, owner):
return self.send_auth_request(
"get", "/v3/resources", params={"owner": owner} if owner else None
)
def list_packages(self, query=None, filters=None, page=None):
assert query or filters
search_query = []
if filters:
valid_filters = (
"authors",
"keywords",
"frameworks",
"platforms",
"headers",
"ids",
"names",
"owners",
"types",
)
assert set(filters.keys()) <= set(valid_filters)
for name, values in filters.items():
for value in set(
values if isinstance(values, (list, tuple)) else [values]
):
search_query.append('%s:"%s"' % (name[:-1], value))
if query:
search_query.append(query)
params = dict(query=" ".join(search_query))
if page:
params["page"] = int(page)
return self.fetch_json_data(
"get", "/v3/packages", params=params, cache_valid="1h"
)
def get_package(self, type_, owner, name, version=None):
try:
return self.fetch_json_data(
"get",
"/v3/packages/{owner}/{type}/{name}".format(
type=type_, owner=owner.lower(), name=name.lower()
),
params=dict(version=version) if version else None,
cache_valid="1h",
)
except HTTPClientError as e:
if e.response is not None and e.response.status_code == 404:
return None
raise e
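A usage sketch for the registry client above (the owner and package names are invented; results depend on the live registry):

from platformio.clients.registry import RegistryClient

client = RegistryClient()
# Plural filter names become singular query qualifiers, e.g. "types" -> type:"library";
# search results are cached for one hour
found = client.list_packages(query="json", filters={"types": ["library"]}, page=1)
# get_package() returns None when the registry answers with HTTP 404
pkg = client.get_package("library", "example-owner", "ExampleLib")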

View File

@ -0,0 +1,146 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import re
import click
from tabulate import tabulate
from platformio.clients.registry import RegistryClient
from platformio.commands.account import validate_username
from platformio.commands.team import validate_orgname_teamname
def validate_client(value):
if ":" in value:
validate_orgname_teamname(value)
else:
validate_username(value)
return value
@click.group("access", short_help="Manage resource access")
def cli():
pass
def validate_urn(value):
value = str(value).strip()
if not re.match(r"^prn:reg:pkg:(\d+):(\w+)$", value, flags=re.I):
raise click.BadParameter("Invalid URN format.")
return value
@cli.command("public", short_help="Make resource public")
@click.argument(
"urn",
callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_public(urn, urn_type):
client = RegistryClient()
client.update_resource(urn=urn, private=0)
return click.secho(
"The resource %s has been successfully updated." % urn,
fg="green",
)
@cli.command("private", short_help="Make resource private")
@click.argument(
"urn",
callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_private(urn, urn_type):
client = RegistryClient()
client.update_resource(urn=urn, private=1)
return click.secho(
"The resource %s has been successfully updated." % urn,
fg="green",
)
@cli.command("grant", short_help="Grant access")
@click.argument("level", type=click.Choice(["admin", "maintainer", "guest"]))
@click.argument(
"client",
metavar="[<ORGNAME:TEAMNAME>|<USERNAME>]",
callback=lambda _, __, value: validate_client(value),
)
@click.argument(
"urn",
callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_grant(level, client, urn, urn_type):
reg_client = RegistryClient()
reg_client.grant_access_for_resource(urn=urn, client=client, level=level)
return click.secho(
"Access for resource %s has been granted for %s" % (urn, client),
fg="green",
)
@cli.command("revoke", short_help="Revoke access")
@click.argument(
"client",
metavar="[ORGNAME:TEAMNAME|USERNAME]",
callback=lambda _, __, value: validate_client(value),
)
@click.argument(
"urn",
callback=lambda _, __, value: validate_urn(value),
)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
def access_revoke(client, urn, urn_type):
reg_client = RegistryClient()
reg_client.revoke_access_from_resource(urn=urn, client=client)
return click.secho(
"Access for resource %s has been revoked for %s" % (urn, client),
fg="green",
)
@cli.command("list", short_help="List published resources")
@click.argument("owner", required=False)
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
@click.option("--json-output", is_flag=True)
def access_list(owner, urn_type, json_output):
reg_client = RegistryClient()
resources = reg_client.list_resources(owner=owner)
if json_output:
return click.echo(json.dumps(resources))
if not resources:
return click.secho("You do not have any resources.", fg="yellow")
for resource in resources:
click.echo()
click.secho(resource.get("name"), fg="cyan")
click.echo("-" * len(resource.get("name")))
table_data = []
table_data.append(("URN:", resource.get("urn")))
table_data.append(("Owner:", resource.get("owner")))
table_data.append(
(
"Access level(s):",
", ".join(
(level.capitalize() for level in resource.get("access_levels"))
),
)
)
click.echo(tabulate(table_data, tablefmt="plain"))
return click.echo()
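These commands can also be exercised programmatically through Click's test runner; a hedged sketch follows (the module path platformio.commands.access and the URN/username values are assumptions, and the call performs a real registry request, so it needs a prior "pio account login"):

from click.testing import CliRunner
from platformio.commands.access import cli as cmd_access

runner = CliRunner()
# Grant "maintainer" access on a package URN to a user; both values are made up
result = runner.invoke(
    cmd_access, ["grant", "maintainer", "example-user", "prn:reg:pkg:123:library"]
)
print(result.exit_code, result.output)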

View File

@ -14,59 +14,286 @@
# pylint: disable=unused-argument
import sys
import datetime
import json
import re
import click
from tabulate import tabulate
from platformio.managers.core import pioplus_call
from platformio.clients.account import AccountClient, AccountNotAuthorized
@click.group("account", short_help="Manage PIO Account")
@click.group("account", short_help="Manage PlatformIO account")
def cli():
pass
@cli.command("register", short_help="Create new PIO Account")
@click.option("-u", "--username")
def account_register(**kwargs):
pioplus_call(sys.argv[1:])
def validate_username(value, field="username"):
value = str(value).strip()
if not re.match(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I):
raise click.BadParameter(
"Invalid %s format. "
"%s must contain only alphanumeric characters "
"or single hyphens, cannot begin or end with a hyphen, "
"and must not be longer than 38 characters."
% (field.lower(), field.capitalize())
)
return value
@cli.command("login", short_help="Log in to PIO Account")
@click.option("-u", "--username")
@click.option("-p", "--password")
def account_login(**kwargs):
pioplus_call(sys.argv[1:])
def validate_email(value):
value = str(value).strip()
if not re.match(r"^[a-z\d_.+-]+@[a-z\d\-]+\.[a-z\d\-.]+$", value, flags=re.I):
raise click.BadParameter("Invalid email address")
return value
@cli.command("logout", short_help="Log out of PIO Account")
def validate_password(value):
value = str(value).strip()
if not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
raise click.BadParameter(
"Invalid password format. "
"Password must contain at least 8 characters"
" including a number and a lowercase letter"
)
return value
@cli.command("register", short_help="Create new PlatformIO Account")
@click.option(
"-u",
"--username",
prompt=True,
callback=lambda _, __, value: validate_username(value),
)
@click.option(
"-e", "--email", prompt=True, callback=lambda _, __, value: validate_email(value)
)
@click.option(
"-p",
"--password",
prompt=True,
hide_input=True,
confirmation_prompt=True,
callback=lambda _, __, value: validate_password(value),
)
@click.option("--firstname", prompt=True)
@click.option("--lastname", prompt=True)
def account_register(username, email, password, firstname, lastname):
client = AccountClient()
client.registration(username, email, password, firstname, lastname)
return click.secho(
"An account has been successfully created. "
"Please check your mail to activate your account and verify your email address.",
fg="green",
)
@cli.command("login", short_help="Log in to PlatformIO Account")
@click.option("-u", "--username", prompt="Username or email")
@click.option("-p", "--password", prompt=True, hide_input=True)
def account_login(username, password):
client = AccountClient()
client.login(username, password)
return click.secho("Successfully logged in!", fg="green")
@cli.command("logout", short_help="Log out of PlatformIO Account")
def account_logout():
pioplus_call(sys.argv[1:])
client = AccountClient()
client.logout()
return click.secho("Successfully logged out!", fg="green")
@cli.command("password", short_help="Change password")
@click.option("--old-password")
@click.option("--new-password")
def account_password(**kwargs):
pioplus_call(sys.argv[1:])
@click.option("--old-password", prompt=True, hide_input=True)
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
def account_password(old_password, new_password):
client = AccountClient()
client.change_password(old_password, new_password)
return click.secho("Password successfully changed!", fg="green")
@cli.command("token", short_help="Get or regenerate Authentication Token")
@click.option("-p", "--password")
@click.option("-p", "--password", prompt=True, hide_input=True)
@click.option("--regenerate", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_token(**kwargs):
pioplus_call(sys.argv[1:])
def account_token(password, regenerate, json_output):
client = AccountClient()
auth_token = client.auth_token(password, regenerate)
if json_output:
return click.echo(json.dumps({"status": "success", "result": auth_token}))
return click.secho("Personal Authentication Token: %s" % auth_token, fg="green")
@cli.command("forgot", short_help="Forgot password")
@click.option("-u", "--username")
def account_forgot(**kwargs):
pioplus_call(sys.argv[1:])
@click.option("--username", prompt="Username or email")
def account_forgot(username):
client = AccountClient()
client.forgot_password(username)
return click.secho(
"If this account is registered, we will send the "
"further instructions to your email.",
fg="green",
)
@cli.command("show", short_help="PIO Account information")
@cli.command("update", short_help="Update profile information")
@click.option("--current-password", prompt=True, hide_input=True)
@click.option("--username")
@click.option("--email")
@click.option("--firstname")
@click.option("--lastname")
def account_update(current_password, **kwargs):
client = AccountClient()
profile = client.get_profile()
new_profile = profile.copy()
if not any(kwargs.values()):
for field in profile:
new_profile[field] = click.prompt(
field.replace("_", " ").capitalize(), default=profile[field]
)
if field == "email":
validate_email(new_profile[field])
if field == "username":
validate_username(new_profile[field])
else:
new_profile.update({key: value for key, value in kwargs.items() if value})
client.update_profile(new_profile, current_password)
click.secho("Profile successfully updated!", fg="green")
username_changed = new_profile["username"] != profile["username"]
email_changed = new_profile["email"] != profile["email"]
if not username_changed and not email_changed:
return None
try:
client.logout()
except AccountNotAuthorized:
pass
if email_changed:
return click.secho(
"Please check your mail to verify your new email address and re-login. ",
fg="yellow",
)
return click.secho("Please re-login.", fg="yellow")
@cli.command("destroy", short_help="Destroy account")
def account_destroy():
client = AccountClient()
click.confirm(
"Are you sure you want to delete the %s user account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% client.get_account_info().get("profile").get("username"),
abort=True,
)
client.destroy_account()
try:
client.logout()
except AccountNotAuthorized:
pass
return click.secho(
"User account has been destroyed.",
fg="green",
)
@cli.command("show", short_help="PlatformIO Account information")
@click.option("--offline", is_flag=True)
@click.option("--json-output", is_flag=True)
def account_show(**kwargs):
pioplus_call(sys.argv[1:])
def account_show(offline, json_output):
client = AccountClient()
info = client.get_account_info(offline)
if json_output:
return click.echo(json.dumps(info))
click.echo()
if info.get("profile"):
print_profile(info["profile"])
if info.get("packages"):
print_packages(info["packages"])
if info.get("subscriptions"):
print_subscriptions(info["subscriptions"])
return click.echo()
def print_profile(profile):
click.secho("Profile", fg="cyan", bold=True)
click.echo("=" * len("Profile"))
data = []
if profile.get("username"):
data.append(("Username:", profile["username"]))
if profile.get("email"):
data.append(("Email:", profile["email"]))
if profile.get("firstname"):
data.append(("First name:", profile["firstname"]))
if profile.get("lastname"):
data.append(("Last name:", profile["lastname"]))
click.echo(tabulate(data, tablefmt="plain"))
def print_packages(packages):
click.echo()
click.secho("Packages", fg="cyan")
click.echo("=" * len("Packages"))
for package in packages:
click.echo()
click.secho(package.get("name"), bold=True)
click.echo("-" * len(package.get("name")))
if package.get("description"):
click.echo(package.get("description"))
data = []
expire = "-"
if "subscription" in package:
expire = datetime.datetime.strptime(
(
package["subscription"].get("end_at")
or package["subscription"].get("next_bill_at")
),
"%Y-%m-%dT%H:%M:%SZ",
).strftime("%Y-%m-%d")
data.append(("Expire:", expire))
services = []
for key in package:
if not key.startswith("service."):
continue
if isinstance(package[key], dict):
services.append(package[key].get("title"))
else:
services.append(package[key])
if services:
data.append(("Services:", ", ".join(services)))
click.echo(tabulate(data, tablefmt="plain"))
def print_subscriptions(subscriptions):
click.echo()
click.secho("Subscriptions", fg="cyan")
click.echo("=" * len("Subscriptions"))
for subscription in subscriptions:
click.echo()
click.secho(subscription.get("product_name"), bold=True)
click.echo("-" * len(subscription.get("product_name")))
data = [("State:", subscription.get("status"))]
begin_at = datetime.datetime.strptime(
subscription.get("begin_at"), "%Y-%m-%dT%H:%M:%SZ"
).strftime("%Y-%m-%d %H:%M:%S")
data.append(("Start date:", begin_at or "-"))
end_at = subscription.get("end_at")
if end_at:
end_at = datetime.datetime.strptime(
subscription.get("end_at"), "%Y-%m-%dT%H:%M:%SZ"
).strftime("%Y-%m-%d %H:%M:%S")
data.append(("End date:", end_at or "-"))
next_bill_at = subscription.get("next_bill_at")
if next_bill_at:
next_bill_at = datetime.datetime.strptime(
subscription.get("next_bill_at"), "%Y-%m-%dT%H:%M:%SZ"
).strftime("%Y-%m-%d %H:%M:%S")
data.append(("Next payment:", next_bill_at or "-"))
data.append(
("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-")
)
data.append(
("Cancel:", click.style(subscription.get("cancel_url"), fg="blue") or "-")
)
click.echo(tabulate(data, tablefmt="plain"))
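The validators defined near the top of this file are plain functions and can be sanity-checked directly; a quick sketch with invented values (it assumes validate_email and validate_password are importable from the same module as validate_username):

import click
from platformio.commands.account import (
    validate_email,
    validate_password,
    validate_username,
)

assert validate_username("my-account") == "my-account"   # alphanumerics and single hyphens
assert validate_email("user@example.com") == "user@example.com"
assert validate_password("abcdef12") == "abcdef12"        # 8+ chars with a digit and a lowercase letter
try:
    validate_username("-bad-")                            # leading/trailing hyphens are rejected
except click.BadParameter as exc:
    print(exc.message)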

View File

@ -19,10 +19,10 @@ from tabulate import tabulate
from platformio import fs
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformManager
from platformio.package.manager.platform import PlatformPackageManager
@click.command("boards", short_help="Embedded Board Explorer")
@click.command("boards", short_help="Embedded board explorer")
@click.argument("query", required=False)
@click.option("--installed", is_flag=True)
@click.option("--json-output", is_flag=True)
@ -59,8 +59,8 @@ def print_boards(boards):
click.style(b["id"], fg="cyan"),
b["mcu"],
"%dMHz" % (b["fcpu"] / 1000000),
fs.format_filesize(b["rom"]),
fs.format_filesize(b["ram"]),
fs.humanize_file_size(b["rom"]),
fs.humanize_file_size(b["ram"]),
b["name"],
)
for b in boards
@ -71,7 +71,7 @@ def print_boards(boards):
def _get_boards(installed=False):
pm = PlatformManager()
pm = PlatformPackageManager()
return pm.get_installed_boards() if installed else pm.get_all_boards()

View File

@ -31,7 +31,7 @@ from platformio.project.config import ProjectConfig
from platformio.project.helpers import find_project_dir_above, get_project_dir
@click.command("check", short_help="Run a static analysis tool on code")
@click.command("check", short_help="Static code analysis")
@click.option("-e", "--environment", multiple=True)
@click.option(
"-d",
@ -61,6 +61,7 @@ from platformio.project.helpers import find_project_dir_above, get_project_dir
multiple=True,
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
)
@click.option("--skip-packages", is_flag=True)
def cli(
environment,
project_dir,
@ -72,6 +73,7 @@ def cli(
verbose,
json_output,
fail_on_defect,
skip_packages,
):
app.set_session_var("custom_project_conf", project_conf)
@ -114,6 +116,7 @@ def cli(
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity"),
skip_packages=skip_packages or env_options.get("check_skip_packages"),
)
for tool in config.get("env:" + envname, "check_tool"):
@ -222,7 +225,7 @@ def collect_component_stats(result):
component = dirname(defect.file) or defect.file
_append_defect(component, defect)
if component.startswith(get_project_dir()):
if component.lower().startswith(get_project_dir().lower()):
while os.sep in component:
component = dirname(component)
_append_defect(component, defect)

View File

@ -51,7 +51,7 @@ class DefectItem(object):
self.cwe = cwe
self.id = id
self.file = file
if file.startswith(get_project_dir()):
if file.lower().startswith(get_project_dir().lower()):
self.file = os.path.relpath(file, get_project_dir())
def __repr__(self):

View File

@ -12,14 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
from tempfile import NamedTemporaryFile
import click
from platformio import fs, proc
from platformio import compat, fs, proc
from platformio.commands.check.defect import DefectItem
from platformio.project.helpers import get_project_dir, load_project_ide_data
from platformio.project.helpers import load_project_ide_data
class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
@ -32,12 +32,13 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
self.cpp_includes = []
self.cpp_defines = []
self.toolchain_defines = []
self._tmp_files = []
self.cc_path = None
self.cxx_path = None
self._defects = []
self._on_defect_callback = None
self._bad_input = False
self._load_cpp_data(project_dir, envname)
self._load_cpp_data(project_dir)
# detect all defects by default
if not self.options.get("severity"):
@ -52,17 +53,17 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
for s in self.options["severity"]
]
def _load_cpp_data(self, project_dir, envname):
data = load_project_ide_data(project_dir, envname)
def _load_cpp_data(self, project_dir):
data = load_project_ide_data(project_dir, self.envname)
if not data:
return
self.cc_flags = data.get("cc_flags", "").split(" ")
self.cxx_flags = data.get("cxx_flags", "").split(" ")
self.cpp_includes = data.get("includes", [])
self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", ""))
self.cxx_flags = click.parser.split_arg_string(data.get("cxx_flags", ""))
self.cpp_includes = self._dump_includes(data.get("includes", {}))
self.cpp_defines = data.get("defines", [])
self.cc_path = data.get("cc_path")
self.cxx_path = data.get("cxx_path")
self.toolchain_defines = self._get_toolchain_defines(self.cc_path)
self.toolchain_defines = self._get_toolchain_defines()
def get_flags(self, tool):
result = []
@ -75,21 +76,54 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
return result
def _get_toolchain_defines(self):
def _extract_defines(language, includes_file):
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
defines = []
cmd = "echo | %s -x %s %s %s -dM -E -" % (
self.cc_path,
language,
" ".join(
[f for f in build_flags if f.startswith(("-m", "-f", "-std"))]
),
includes_file,
)
result = proc.exec_command(cmd, shell=True)
for line in result["out"].split("\n"):
tokens = line.strip().split(" ", 2)
if not tokens or tokens[0] != "#define":
continue
if len(tokens) > 2:
defines.append("%s=%s" % (tokens[1], tokens[2]))
else:
defines.append(tokens[1])
return defines
incflags_file = self._long_includes_hook(self.cpp_includes)
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
def _create_tmp_file(self, data):
with NamedTemporaryFile("w", delete=False) as fp:
fp.write(data)
self._tmp_files.append(fp.name)
return fp.name
def _long_includes_hook(self, includes):
data = []
for inc in includes:
data.append('-I"%s"' % fs.to_unix_path(inc))
return '@"%s"' % self._create_tmp_file(" ".join(data))
@staticmethod
def _get_toolchain_defines(cc_path):
defines = []
result = proc.exec_command("echo | %s -dM -E -x c++ -" % cc_path, shell=True)
for line in result["out"].split("\n"):
tokens = line.strip().split(" ", 2)
if not tokens or tokens[0] != "#define":
continue
if len(tokens) > 2:
defines.append("%s=%s" % (tokens[1], tokens[2]))
else:
defines.append(tokens[1])
return defines
def _dump_includes(includes_map):
result = []
for includes in includes_map.values():
for include in includes:
if include not in result:
result.append(include)
return result
@staticmethod
def is_flag_set(flag, flags):
@ -129,19 +163,28 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
return raw_line
def clean_up(self):
pass
for f in self._tmp_files:
if os.path.isfile(f):
os.remove(f)
def get_project_target_files(self):
allowed_extensions = (".h", ".hpp", ".c", ".cc", ".cpp", ".ino")
result = []
@staticmethod
def get_project_target_files(patterns):
c_extension = (".c",)
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
header_extensions = (".h", ".hh", ".hpp", ".hxx")
result = {"c": [], "c++": [], "headers": []}
def _add_file(path):
if not path.endswith(allowed_extensions):
return
result.append(os.path.realpath(path))
if path.endswith(header_extensions):
result["headers"].append(os.path.realpath(path))
elif path.endswith(c_extension):
result["c"].append(os.path.realpath(path))
elif path.endswith(cpp_extensions):
result["c++"].append(os.path.realpath(path))
for pattern in self.options["patterns"]:
for item in glob.glob(pattern):
for pattern in patterns:
for item in compat.glob_recursive(pattern):
if not os.path.isdir(item):
_add_file(item)
for root, _, files in os.walk(item, followlinks=True):
@ -150,27 +193,23 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
return result
def get_source_language(self):
with fs.cd(get_project_dir()):
for _, __, files in os.walk(self.config.get_optional_dir("src")):
for name in files:
if "." not in name:
continue
if os.path.splitext(name)[1].lower() in (".cpp", ".cxx", ".ino"):
return "c++"
return "c"
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
cmd = self.configure_command()
if self.options.get("verbose"):
click.echo(" ".join(cmd))
if cmd:
if self.options.get("verbose"):
click.echo(" ".join(cmd))
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
else:
if self.options.get("verbose"):
click.echo("Error: Couldn't configure command")
self._bad_input = True
self.clean_up()
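The built-in compiler macros that feed toolchain_defines come from piping an empty translation unit through the preprocessor; a standalone sketch of the same trick (gcc is a placeholder for the project's actual cc_path):

import subprocess

def dump_builtin_defines(compiler="gcc", language="c++"):
    # "-dM -E" prints every predefined macro for an empty input program
    out = subprocess.run(
        [compiler, "-x", language, "-dM", "-E", "-"],
        input="", capture_output=True, text=True, check=True,
    ).stdout
    defines = []
    for line in out.splitlines():
        tokens = line.strip().split(" ", 2)
        if len(tokens) < 2 or tokens[0] != "#define":
            continue
        defines.append(tokens[1] if len(tokens) == 2 else "%s=%s" % (tokens[1], tokens[2]))
    return defines

print(len(dump_builtin_defines()))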

View File

@ -17,7 +17,7 @@ from os.path import join
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
from platformio.package.manager.core import get_core_package_dir
class ClangtidyCheckTool(CheckToolBase):
@ -57,11 +57,25 @@ class ClangtidyCheckTool(CheckToolBase):
if not self.is_flag_set("--checks", flags):
cmd.append("--checks=*")
cmd.extend(flags)
cmd.extend(self.get_project_target_files())
cmd.append("--")
project_files = self.get_project_target_files(self.options["patterns"])
cmd.extend(["-D%s" % d for d in self.cpp_defines + self.toolchain_defines])
cmd.extend(["-I%s" % inc for inc in self.cpp_includes])
src_files = []
for scope in project_files:
src_files.extend(project_files[scope])
cmd.extend(flags + src_files + ["--"])
cmd.extend(
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines["c++"]]
)
includes = []
for inc in self.cpp_includes:
if self.options.get("skip_packages") and inc.lower().startswith(
self.config.get_optional_dir("packages").lower()
):
continue
includes.append(inc)
cmd.extend(["-I%s" % inc for inc in includes])
return cmd

View File

@ -12,18 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from os import remove
from os.path import isfile, join
from tempfile import NamedTemporaryFile
import os
import click
from platformio import proc
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
from platformio.package.manager.core import get_core_package_dir
class CppcheckCheckTool(CheckToolBase):
def __init__(self, *args, **kwargs):
self._tmp_files = []
self._field_delimiter = "<&PIO&>"
self._buffer = ""
self.defect_fields = [
"severity",
"message",
@ -55,13 +57,15 @@ class CppcheckCheckTool(CheckToolBase):
return line
def parse_defect(self, raw_line):
if "<&PIO&>" not in raw_line or any(
f not in raw_line for f in self.defect_fields
):
if self._field_delimiter not in raw_line:
return None
self._buffer += raw_line
if any(f not in self._buffer for f in self.defect_fields):
return None
args = dict()
for field in raw_line.split("<&PIO&>"):
for field in self._buffer.split(self._field_delimiter):
field = field.strip().replace('"', "")
name, value = field.split("=", 1)
args[name] = value
@ -74,20 +78,46 @@ class CppcheckCheckTool(CheckToolBase):
else:
args["severity"] = DefectItem.SEVERITY_LOW
# Skip defects found in third-party packages, but note that such defects can break
# the checking process, in which case defects from project files are not reported
breaking_defect_ids = ("preprocessorErrorDirective", "syntaxError")
if (
args.get("file", "")
.lower()
.startswith(self.config.get_optional_dir("packages").lower())
):
if args["id"] in breaking_defect_ids:
if self.options.get("verbose"):
click.echo(
"Error: Found a breaking defect '%s' in %s:%s\n"
"Please note: check results might not be valid!\n"
"Try adding --skip-packages"
% (args.get("message"), args.get("file"), args.get("line"))
)
click.echo()
self._bad_input = True
return None
self._buffer = ""
return DefectItem(**args)
def configure_command(self):
tool_path = join(get_core_package_dir("tool-cppcheck"), "cppcheck")
def configure_command(
self, language, src_files
): # pylint: disable=arguments-differ
tool_path = os.path.join(get_core_package_dir("tool-cppcheck"), "cppcheck")
cmd = [
tool_path,
"--addon-python=%s" % proc.get_pythonexe_path(),
"--error-exitcode=1",
"--verbose" if self.options.get("verbose") else "--quiet",
]
cmd.append(
'--template="%s"'
% "<&PIO&>".join(["{0}={{{0}}}".format(f) for f in self.defect_fields])
% self._field_delimiter.join(
["{0}={{{0}}}".format(f) for f in self.defect_fields]
)
)
flags = self.get_flags("cppcheck")
@ -108,51 +138,112 @@ class CppcheckCheckTool(CheckToolBase):
cmd.append("--enable=%s" % ",".join(enabled_checks))
if not self.is_flag_set("--language", flags):
if self.get_source_language() == "c++":
cmd.append("--language=c++")
cmd.append("--language=" + language)
if not self.is_flag_set("--std", flags):
for f in self.cxx_flags + self.cc_flags:
if "-std" in f:
# Standards with GNU extensions are not allowed
cmd.append("-" + f.replace("gnu", "c"))
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
for flag in build_flags:
if "-std" in flag:
# Standards with GNU extensions are not allowed
cmd.append("-" + flag.replace("gnu", "c"))
cmd.extend(
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
)
cmd.extend(["-D%s" % d for d in self.cpp_defines + self.toolchain_defines])
cmd.extend(flags)
cmd.append("--file-list=%s" % self._generate_src_file())
cmd.extend(
"--include=" + inc
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
)
cmd.append("--file-list=%s" % self._generate_src_file(src_files))
cmd.append("--includes-file=%s" % self._generate_inc_file())
core_dir = self.config.get_optional_dir("packages")
cmd.append("--suppress=*:%s*" % core_dir)
cmd.append("--suppress=unmatchedSuppression:%s*" % core_dir)
return cmd
def _create_tmp_file(self, data):
with NamedTemporaryFile("w", delete=False) as fp:
fp.write(data)
self._tmp_files.append(fp.name)
return fp.name
@staticmethod
def get_forced_includes(build_flags, includes):
def _extract_filepath(flag, include_options, build_flags):
path = ""
for option in include_options:
if not flag.startswith(option):
continue
if flag.split(option)[1].strip():
path = flag.split(option)[1].strip()
elif build_flags.index(flag) + 1 < len(build_flags):
path = build_flags[build_flags.index(flag) + 1]
return path
def _generate_src_file(self):
src_files = [
f for f in self.get_project_target_files() if not f.endswith((".h", ".hpp"))
]
def _search_include_dir(filepath, include_paths):
for inc_path in include_paths:
path = os.path.join(inc_path, filepath)
if os.path.isfile(path):
return path
return ""
result = []
include_options = ("-include", "-imacros")
for f in build_flags:
if f.startswith(include_options):
filepath = _extract_filepath(f, include_options, build_flags)
if not os.path.isabs(filepath):
filepath = _search_include_dir(filepath, includes)
if os.path.isfile(filepath):
result.append(filepath)
return result
def _generate_src_file(self, src_files):
return self._create_tmp_file("\n".join(src_files))
def _generate_inc_file(self):
return self._create_tmp_file("\n".join(self.cpp_includes))
result = []
for inc in self.cpp_includes:
if self.options.get("skip_packages") and inc.lower().startswith(
self.config.get_optional_dir("packages").lower()
):
continue
result.append(inc)
return self._create_tmp_file("\n".join(result))
def clean_up(self):
for f in self._tmp_files:
if isfile(f):
remove(f)
super(CppcheckCheckTool, self).clean_up()
# delete temporary dump files generated by addons
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
return
for f in self.get_project_target_files():
dump_file = f + ".dump"
if isfile(dump_file):
remove(dump_file)
for files in self.get_project_target_files(self.options["patterns"]).values():
for f in files:
dump_file = f + ".dump"
if os.path.isfile(dump_file):
os.remove(dump_file)
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
project_files = self.get_project_target_files(self.options["patterns"])
languages = ("c", "c++")
if not any([project_files[t] for t in languages]):
click.echo("Error: Nothing to check.")
return True
for language in languages:
if not project_files[language]:
continue
cmd = self.configure_command(language, project_files[language])
if not cmd:
self._bad_input = True
continue
if self.options.get("verbose"):
click.echo(" ".join(cmd))
proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
self.clean_up()
return self._bad_input
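The --template/--buffer handling above boils down to splitting name=value pairs on a private delimiter once all expected fields have arrived; a tiny standalone sketch with a made-up defect line:

FIELD_DELIMITER = "<&PIO&>"
raw_line = (
    'severity=error<&PIO&>message=Array index out of bounds'
    '<&PIO&>file=src/main.c<&PIO&>line=42<&PIO&>id=arrayIndexOutOfBounds'
)
args = {}
for field in raw_line.split(FIELD_DELIMITER):
    name, value = field.strip().replace('"', "").split("=", 1)
    args[name] = value
assert args["severity"] == "error" and args["line"] == "42"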

View File

@ -22,7 +22,7 @@ import click
from platformio import proc, util
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.managers.core import get_core_package_dir
from platformio.package.manager.core import get_core_package_dir
class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-attributes
@ -140,9 +140,7 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
os.remove(self._tmp_output_file)
if not os.path.isfile(self._tmp_preprocessed_file):
click.echo(
"Error: Missing preprocessed file '%s'" % (self._tmp_preprocessed_file)
)
click.echo("Error: Missing preprocessed file for '%s'" % src_file)
return ""
cmd = [
@ -175,6 +173,9 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
return os.path.join(self._tmp_dir, next(tempfile._get_candidate_names()))
def _prepare_preprocessed_file(self, src_file):
if os.path.isfile(self._tmp_preprocessed_file):
os.remove(self._tmp_preprocessed_file)
flags = self.cxx_flags
compiler = self.cxx_path
if src_file.endswith(".c"):
@ -186,40 +187,46 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
cmd.extend(["-D%s" % d for d in self.cpp_defines])
cmd.append('@"%s"' % self._tmp_cmd_file)
# Explicitly specify C++ as the language used in .ino files
if src_file.endswith(".ino"):
cmd.insert(1, "-xc++")
result = proc.exec_command(" ".join(cmd), shell=True)
if result["returncode"] != 0:
if result["returncode"] != 0 or result["err"]:
if self.options.get("verbose"):
click.echo(" ".join(cmd))
click.echo(result["err"])
self._bad_input = True
def clean_up(self):
super(PvsStudioCheckTool, self).clean_up()
if os.path.isdir(self._tmp_dir):
shutil.rmtree(self._tmp_dir)
def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
src_files = [
f for f in self.get_project_target_files() if not f.endswith((".h", ".hpp"))
]
for src_file in src_files:
self._prepare_preprocessed_file(src_file)
cmd = self.configure_command(src_file)
if self.options.get("verbose"):
click.echo(" ".join(cmd))
if not cmd:
self._bad_input = True
for scope, files in self.get_project_target_files(
self.options["patterns"]
).items():
if scope not in ("c", "c++"):
continue
for src_file in files:
self._prepare_preprocessed_file(src_file)
cmd = self.configure_command(src_file)
if self.options.get("verbose"):
click.echo(" ".join(cmd))
if not cmd:
self._bad_input = True
continue
result = proc.exec_command(cmd)
# pylint: disable=unsupported-membership-test
if result["returncode"] != 0 or "License was not entered" in result["err"]:
self._bad_input = True
click.echo(result["err"])
continue
result = proc.exec_command(cmd)
# pylint: disable=unsupported-membership-test
if result["returncode"] != 0 or "license" in result["err"].lower():
self._bad_input = True
click.echo(result["err"])
continue
self._process_defects(self.parse_defects(self._tmp_output_file))
self._process_defects(self.parse_defects(self._tmp_output_file))
self.clean_up()

View File

@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from glob import glob
from os import getenv, makedirs, remove
from os.path import basename, isdir, isfile, join, realpath
from shutil import copyfile, copytree
@ -20,11 +19,10 @@ from tempfile import mkdtemp
import click
from platformio import app, fs
from platformio import app, compat, fs
from platformio.commands.project import project_init as cmd_project_init
from platformio.commands.project import validate_boards
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import glob_escape
from platformio.exception import CIBuildEnvsEmpty
from platformio.project.config import ProjectConfig
@ -36,7 +34,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
if p.startswith("~"):
value[i] = fs.expanduser(p)
value[i] = realpath(value[i])
if not glob(value[i]):
if not compat.glob_recursive(value[i]):
invalid_path = p
break
try:
@ -46,7 +44,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
raise click.BadParameter("Found invalid path: %s" % invalid_path)
@click.command("ci", short_help="Continuous Integration")
@click.command("ci", short_help="Continuous integration")
@click.argument("src", nargs=-1, callback=validate_path)
@click.option("-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
@click.option("--exclude", multiple=True)
@ -98,7 +96,7 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
continue
contents = []
for p in patterns:
contents += glob(p)
contents += compat.glob_recursive(p)
_copy_contents(join(build_dir, dir_name), contents)
if project_conf and isfile(project_conf):
@ -159,7 +157,7 @@ def _copy_contents(dst_dir, contents):
def _exclude_contents(dst_dir, patterns):
contents = []
for p in patterns:
contents += glob(join(glob_escape(dst_dir), p))
contents += compat.glob_recursive(join(compat.glob_escape(dst_dir), p))
for path in contents:
path = realpath(path)
if isdir(path):

View File

@ -21,10 +21,13 @@ from os.path import isfile
import click
from platformio import app, exception, fs, proc, util
from platformio import app, exception, fs, proc
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.managers.core import inject_contrib_pysite
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.package.manager.core import inject_contrib_pysite
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectEnvsNotAvailableError
from platformio.project.helpers import is_platformio_project, load_project_ide_data
@ -33,7 +36,7 @@ from platformio.project.helpers import is_platformio_project, load_project_ide_d
@click.command(
"debug",
context_settings=dict(ignore_unknown_options=True),
short_help="PIO Unified Debugger",
short_help="Unified debugger",
)
@click.option(
"-d",
@ -73,18 +76,29 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
env_options = config.items(env=env_name, as_dict=True)
if not set(env_options.keys()) >= set(["platform", "board"]):
raise ProjectEnvsNotAvailableError()
debug_options = helpers.validate_debug_options(ctx, env_options)
try:
platform = PlatformFactory.new(env_options["platform"])
except UnknownPlatform:
ctx.invoke(
cmd_platform_install,
platforms=[env_options["platform"]],
skip_default_package=True,
)
platform = PlatformFactory.new(env_options["platform"])
debug_options = helpers.configure_initial_debug_options(platform, env_options)
assert debug_options
if not interface:
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
configuration = load_project_ide_data(project_dir, env_name)
if not configuration:
raise DebugInvalidOptionsError("Could not load debug configuration")
ide_data = load_project_ide_data(project_dir, env_name)
if not ide_data:
raise DebugInvalidOptionsError("Could not load a build configuration")
if "--version" in __unprocessed:
result = proc.exec_command([configuration["gdb_path"], "--version"])
result = proc.exec_command([ide_data["gdb_path"], "--version"])
if result["returncode"] == 0:
return click.echo(result["out"])
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
@ -99,23 +113,25 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
nl=False,
)
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
debug_options, configuration
)
try:
debug_options = platform.configure_debug_options(debug_options, ide_data)
except NotImplementedError:
# legacy for ESP32 dev-platform <=2.0.0
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
debug_options, ide_data
)
rebuild_prog = False
preload = debug_options["load_cmds"] == ["preload"]
load_mode = debug_options["load_mode"]
if load_mode == "always":
rebuild_prog = preload or not helpers.has_debug_symbols(
configuration["prog_path"]
)
rebuild_prog = preload or not helpers.has_debug_symbols(ide_data["prog_path"])
elif load_mode == "modified":
rebuild_prog = helpers.is_prog_obsolete(
configuration["prog_path"]
) or not helpers.has_debug_symbols(configuration["prog_path"])
ide_data["prog_path"]
) or not helpers.has_debug_symbols(ide_data["prog_path"])
else:
rebuild_prog = not isfile(configuration["prog_path"])
rebuild_prog = not isfile(ide_data["prog_path"])
if preload or (not rebuild_prog and load_mode != "always"):
# don't load firmware through debug server
@ -130,7 +146,7 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
nl=False,
)
stream = helpers.GDBMIConsoleStream()
with util.capture_std_streams(stream):
with proc.capture_std_streams(stream):
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
stream.close()
else:
@ -139,19 +155,19 @@ def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unpro
# save SHA sum of newly created prog
if load_mode == "modified":
helpers.is_prog_obsolete(configuration["prog_path"])
helpers.is_prog_obsolete(ide_data["prog_path"])
if not isfile(configuration["prog_path"]):
if not isfile(ide_data["prog_path"]):
raise DebugInvalidOptionsError("Program/firmware is missed")
# run debugging client
inject_contrib_pysite()
# pylint: disable=import-outside-toplevel
from platformio.commands.debug.client import GDBClient, reactor
from platformio.commands.debug.process.client import GDBClient, reactor
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
client.spawn(configuration["gdb_path"], configuration["prog_path"])
client.spawn(ide_data["gdb_path"], ide_data["prog_path"])
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
reactor.run()

View File

@ -20,13 +20,11 @@ from hashlib import sha1
from io import BytesIO
from os.path import isfile
from platformio import exception, fs, util
from platformio import fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import is_bytes
from platformio.managers.platform import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.options import ProjectOptions
@ -88,21 +86,11 @@ def predebug_project(ctx, project_dir, env_name, preload, verbose):
time.sleep(5)
def validate_debug_options(cmd_ctx, env_options):
def configure_initial_debug_options(platform, env_options):
def _cleanup_cmds(items):
items = ProjectConfig.parse_multi_values(items)
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
try:
platform = PlatformFactory.newPlatform(env_options["platform"])
except exception.UnknownPlatform:
cmd_ctx.invoke(
cmd_platform_install,
platforms=[env_options["platform"]],
skip_default_package=True,
)
platform = PlatformFactory.newPlatform(env_options["platform"])
board_config = platform.board_config(env_options["board"])
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
@ -194,13 +182,16 @@ def validate_debug_options(cmd_ctx, env_options):
def configure_esp32_load_cmds(debug_options, configuration):
"""
DEPRECATED: Moved to ESP32 dev-platform
See platform.py::configure_debug_options
"""
flash_images = configuration.get("extra", {}).get("flash_images")
ignore_conds = [
debug_options["load_cmds"] != ["load"],
"xtensa-esp32" not in configuration.get("cc_path", ""),
not configuration.get("flash_extra_images"),
not all(
[isfile(item["path"]) for item in configuration.get("flash_extra_images")]
),
not flash_images,
not all([isfile(item["path"]) for item in flash_images]),
]
if any(ignore_conds):
return debug_options["load_cmds"]
@ -209,7 +200,7 @@ def configure_esp32_load_cmds(debug_options, configuration):
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
path=fs.to_unix_path(item["path"]), offset=item["offset"]
)
for item in configuration.get("flash_extra_images")
for item in flash_images
]
mon_cmds.append(
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
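For orientation, a rough sketch of the command generation that the deprecated `configure_esp32_load_cmds` helper performs above; the image list and program path are made-up examples, and the real code additionally normalizes paths and checks several ignore conditions first:

def build_monitor_cmds(flash_images, prog_path):
    # one "monitor program_esp32" command per extra flash image, then the app image
    cmds = [
        'monitor program_esp32 "{%s}" %s verify' % (img["path"], img["offset"])
        for img in flash_images
    ]
    cmds.append('monitor program_esp32 "{%s.bin}" 0x10000 verify' % prog_path)
    return cmds

print(build_monitor_cmds(
    [{"path": "bootloader.bin", "offset": "0x1000"},
     {"path": "partitions.bin", "offset": "0x8000"}],
    ".pio/build/esp32dev/firmware",
))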

View File

@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@ -26,12 +26,13 @@ from twisted.internet import reactor # pylint: disable=import-error
from twisted.internet import stdio # pylint: disable=import-error
from twisted.internet import task # pylint: disable=import-error
from platformio import app, fs, proc, telemetry, util
from platformio import fs, proc, telemetry, util
from platformio.cache import ContentCache
from platformio.commands.debug import helpers
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.initcfgs import get_gdb_init_config
from platformio.commands.debug.process import BaseProcess
from platformio.commands.debug.server import DebugServer
from platformio.commands.debug.process.base import BaseProcess
from platformio.commands.debug.process.server import DebugServer
from platformio.compat import hashlib_encode_data, is_bytes
from platformio.project.helpers import get_project_cache_dir
@ -194,7 +195,7 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
# go to init break automatically
if self.INIT_COMPLETED_BANNER.encode() in data:
telemetry.send_event(
"Debug", "Started", telemetry.encode_run_environment(self.env_options)
"Debug", "Started", telemetry.dump_run_environment(self.env_options)
)
self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
self._auto_continue_timer.start(0.1)
@ -231,8 +232,11 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
self._target_is_run = True
def _handle_error(self, data):
self._errors_buffer += data
if self.PIO_SRC_NAME.encode() not in data or b"Error in sourced" not in data:
self._errors_buffer = (self._errors_buffer + data)[-8192:] # keep last 8 KBytes
if not (
self.PIO_SRC_NAME.encode() in self._errors_buffer
and b"Error in sourced" in self._errors_buffer
):
return
last_erros = self._errors_buffer.decode()
@ -240,7 +244,7 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
last_erros = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_erros, flags=re.M)
err = "%s -> %s" % (
telemetry.encode_run_environment(self.env_options),
telemetry.dump_run_environment(self.env_options),
last_erros,
)
telemetry.send_exception("DebugInitError: %s" % err)
@ -249,7 +253,7 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
def _kill_previous_session(self):
assert self._session_id
pid = None
with app.ContentCache() as cc:
with ContentCache() as cc:
pid = cc.get(self._session_id)
cc.delete(self._session_id)
if not pid:
@ -266,11 +270,11 @@ class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
def _lock_session(self, pid):
if not self._session_id:
return
with app.ContentCache() as cc:
with ContentCache() as cc:
cc.set(self._session_id, str(pid), "1h")
def _unlock_session(self):
if not self._session_id:
return
with app.ContentCache() as cc:
with ContentCache() as cc:
cc.delete(self._session_id)
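The `_handle_error` change above accumulates stderr into a bounded buffer (last 8 KB) and only reports once both markers have been seen across chunks, instead of requiring them in a single read. A self-contained sketch of that pattern (the marker values are illustrative):

class ErrorWatcher:
    MAX_BUFFER = 8192  # keep only the newest 8 KB, as in the diff above

    def __init__(self, markers):
        self._buffer = b""
        self._markers = markers

    def feed(self, data):
        self._buffer = (self._buffer + data)[-self.MAX_BUFFER:]
        return all(marker in self._buffer for marker in self._markers)

watcher = ErrorWatcher([b".pioinit", b"Error in sourced"])
print(watcher.feed(b".pioinit:10: "))                  # False, only one marker so far
print(watcher.feed(b"Error in sourced command file"))  # True, both seen across chunks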

View File

@ -22,7 +22,7 @@ from twisted.internet import reactor # pylint: disable=import-error
from platformio import fs, util
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
from platformio.commands.debug.process import BaseProcess
from platformio.commands.debug.process.base import BaseProcess
from platformio.proc import where_is_program

View File

@ -22,11 +22,11 @@ from serial.tools import miniterm
from platformio import exception, fs, util
from platformio.commands.device import helpers as device_helpers
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformFactory
from platformio.platform.factory import PlatformFactory
from platformio.project.exception import NotPlatformIOProjectError
@click.group(short_help="Monitor device or list existing")
@click.group(short_help="Device manager & serial/socket monitor")
def cli():
pass
@ -192,7 +192,7 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
platform = None
if "platform" in project_options:
with fs.cd(kwargs["project_dir"]):
platform = PlatformFactory.newPlatform(project_options["platform"])
platform = PlatformFactory.new(project_options["platform"])
device_helpers.register_platform_filters(
platform, kwargs["project_dir"], kwargs["environment"]
)
@ -203,7 +203,9 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
kwargs["port"] = ports[0]["port"]
elif "platform" in project_options and "board" in project_options:
board_hwids = device_helpers.get_board_hwids(
kwargs["project_dir"], platform, project_options["board"],
kwargs["project_dir"],
platform,
project_options["board"],
)
for item in ports:
for hwid in board_hwids:
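The port auto-detection in the hunk above (cut off at the matching loop) pairs each discovered serial port with the HWIDs declared for the configured board. A simplified standalone version of that matching, with example data and field names following pyserial's port listing:

def find_board_port(ports, board_hwids):
    for port in ports:
        for hwid in board_hwids:
            if hwid and hwid.lower() in port["hwid"].lower():
                return port["port"]
    return None

ports = [{"port": "/dev/ttyUSB0", "hwid": "USB VID:PID=10C4:EA60 SER=0001"}]
print(find_board_port(ports, ["VID:PID=10C4:EA60"]))  # /dev/ttyUSB0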

View File

@ -22,13 +22,16 @@ class Timestamp(DeviceMonitorFilter):
def __init__(self, *args, **kwargs):
super(Timestamp, self).__init__(*args, **kwargs)
self._first_text_received = False
self._line_started = False
def rx(self, text):
if self._first_text_received and "\n" not in text:
if self._line_started and "\n" not in text:
return text
timestamp = datetime.now().strftime("%H:%M:%S.%f")[:-3]
if not self._first_text_received:
self._first_text_received = True
return "%s > %s" % (timestamp, text)
if not self._line_started:
self._line_started = True
text = "%s > %s" % (timestamp, text)
if text.endswith("\n"):
self._line_started = False
return text[:-1].replace("\n", "\n%s > " % timestamp) + "\n"
return text.replace("\n", "\n%s > " % timestamp)
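The updated "time" filter tracks whether a line is already in progress and resets that flag on every newline, so chunked output still gets exactly one timestamp per line. A standalone sketch mirroring the new `rx()` logic:

from datetime import datetime

class TimestampPrefixer:
    def __init__(self):
        self._line_started = False

    def rx(self, text):
        if self._line_started and "\n" not in text:
            return text  # middle of a line: pass chunks through untouched
        stamp = datetime.now().strftime("%H:%M:%S.%f")[:-3]
        if not self._line_started:
            self._line_started = True
            text = "%s > %s" % (stamp, text)
        if text.endswith("\n"):
            self._line_started = False
            return text[:-1].replace("\n", "\n%s > " % stamp) + "\n"
        return text.replace("\n", "\n%s > " % stamp)

f = TimestampPrefixer()
print(repr(f.rx("boot ok\npart")), repr(f.rx("ial line\n")))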

View File

@ -22,14 +22,10 @@ import click
from platformio import exception
from platformio.compat import WINDOWS
from platformio.managers.core import (
build_contrib_pysite_deps,
get_core_package_dir,
inject_contrib_pysite,
)
from platformio.package.manager.core import get_core_package_dir, inject_contrib_pysite
@click.command("home", short_help="PIO Home")
@click.command("home", short_help="UI to manage PlatformIO")
@click.option("--port", type=int, default=8008, help="HTTP port, default=8008")
@click.option(
"--host",
@ -55,12 +51,7 @@ def cli(port, host, no_open, shutdown_timeout):
# import contrib modules
inject_contrib_pysite()
try:
from autobahn.twisted.resource import WebSocketResource
except: # pylint: disable=bare-except
build_contrib_pysite_deps(get_core_package_dir("contrib-pysite"))
from autobahn.twisted.resource import WebSocketResource
from autobahn.twisted.resource import WebSocketResource
from twisted.internet import reactor
from twisted.web import server
from twisted.internet.error import CannotListenError
@ -71,6 +62,7 @@ def cli(port, host, no_open, shutdown_timeout):
from platformio.commands.home.rpc.handlers.os import OSRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.commands.home.rpc.handlers.project import ProjectRPC
from platformio.commands.home.rpc.handlers.account import AccountRPC
from platformio.commands.home.rpc.server import JSONRPCServerFactory
from platformio.commands.home.web import WebRoot
@ -81,6 +73,7 @@ def cli(port, host, no_open, shutdown_timeout):
factory.addHandler(OSRPC(), namespace="os")
factory.addHandler(PIOCoreRPC(), namespace="core")
factory.addHandler(ProjectRPC(), namespace="project")
factory.addHandler(AccountRPC(), namespace="account")
contrib_dir = get_core_package_dir("contrib-piohome")
if not isdir(contrib_dir):

View File

@ -12,10 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=keyword-arg-before-vararg, arguments-differ
import os
import socket
# pylint: disable=keyword-arg-before-vararg,arguments-differ,signature-differs
import requests
from twisted.internet import defer # pylint: disable=import-error
@ -52,18 +49,3 @@ def get_core_fullpath():
return where_is_program(
"platformio" + (".exe" if "windows" in util.get_systype() else "")
)
@util.memoized(expire="10s")
def is_twitter_blocked():
ip = "104.244.42.1"
timeout = 2
try:
if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
requests.get("http://%s" % ip, allow_redirects=False, timeout=timeout)
else:
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((ip, 80))
return False
except: # pylint: disable=bare-except
pass
return True

View File

@ -0,0 +1,29 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jsonrpc # pylint: disable=import-error
from platformio.clients.account import AccountClient
class AccountRPC(object):
@staticmethod
def call_client(method, *args, **kwargs):
try:
client = AccountClient()
return getattr(client, method)(*args, **kwargs)
except Exception as e: # pylint: disable=bare-except
raise jsonrpc.exceptions.JSONRPCDispatchException(
code=4003, message="PIO Account Call Error", data=str(e)
)
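The new `AccountRPC.call_client` above forwards an arbitrary method name to `AccountClient` and converts any failure into a single structured JSON-RPC error. A minimal sketch of that pattern, with a made-up error class standing in for `JSONRPCDispatchException`:

class RPCError(Exception):
    def __init__(self, code, message, data):
        super(RPCError, self).__init__(message)
        self.code = code
        self.data = data

def call_client(client, method, *args, **kwargs):
    try:
        return getattr(client, method)(*args, **kwargs)
    except Exception as exc:  # surface one structured error to the UI
        raise RPCError(4003, "PIO Account Call Error", str(exc))

class FakeClient:
    def get_account_info(self):
        raise RuntimeError("not authorized")

try:
    call_client(FakeClient(), "get_account_info")
except RPCError as exc:
    print(exc.code, exc.data)  # 4003 not authorized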

View File

@ -17,15 +17,15 @@ import time
from twisted.internet import defer, reactor # pylint: disable=import-error
from platformio import app
from platformio.cache import ContentCache
from platformio.commands.home.rpc.handlers.os import OSRPC
class MiscRPC(object):
def load_latest_tweets(self, data_url):
cache_key = app.ContentCache.key_from_args(data_url, "tweets")
cache_valid = "7d"
with app.ContentCache() as cc:
cache_key = ContentCache.key_from_args(data_url, "tweets")
cache_valid = "180d"
with ContentCache() as cc:
cache_data = cc.get(cache_key)
if cache_data:
cache_data = json.loads(cache_data)
@ -43,7 +43,7 @@ class MiscRPC(object):
@defer.inlineCallbacks
def _preload_latest_tweets(data_url, cache_key, cache_valid):
result = json.loads((yield OSRPC.fetch_content(data_url)))
with app.ContentCache() as cc:
with ContentCache() as cc:
cc.set(
cache_key,
json.dumps({"time": int(time.time()), "result": result}),

View File

@ -14,7 +14,6 @@
from __future__ import absolute_import
import glob
import io
import os
import shutil
@ -23,9 +22,11 @@ from functools import cmp_to_key
import click
from twisted.internet import defer # pylint: disable=import-error
from platformio import app, fs, util
from platformio import __default_requests_timeout__, fs, util
from platformio.cache import ContentCache
from platformio.clients.http import ensure_internet_on
from platformio.commands.home import helpers
from platformio.compat import PY2, get_filesystem_encoding
from platformio.compat import PY2, get_filesystem_encoding, glob_recursive
class OSRPC(object):
@ -40,26 +41,30 @@ class OSRPC(object):
"Safari/603.3.8"
)
}
cache_key = app.ContentCache.key_from_args(uri, data) if cache_valid else None
with app.ContentCache() as cc:
cache_key = ContentCache.key_from_args(uri, data) if cache_valid else None
with ContentCache() as cc:
if cache_key:
result = cc.get(cache_key)
if result is not None:
defer.returnValue(result)
# check internet before and resolve issue with 60 seconds timeout
util.internet_on(raise_exception=True)
ensure_internet_on(raise_exception=True)
session = helpers.requests_session()
if data:
r = yield session.post(uri, data=data, headers=headers)
r = yield session.post(
uri, data=data, headers=headers, timeout=__default_requests_timeout__
)
else:
r = yield session.get(uri, headers=headers)
r = yield session.get(
uri, headers=headers, timeout=__default_requests_timeout__
)
r.raise_for_status()
result = r.text
if cache_valid:
with app.ContentCache() as cc:
with ContentCache() as cc:
cc.set(cache_key, result, cache_valid)
defer.returnValue(result)
@ -107,7 +112,7 @@ class OSRPC(object):
@staticmethod
def copy(src, dst):
return shutil.copytree(src, dst)
return shutil.copytree(src, dst, symlinks=True)
@staticmethod
def glob(pathnames, root=None):
@ -115,7 +120,9 @@ class OSRPC(object):
pathnames = [pathnames]
result = set()
for pathname in pathnames:
result |= set(glob.glob(os.path.join(root, pathname) if root else pathname))
result |= set(
glob_recursive(os.path.join(root, pathname) if root else pathname)
)
return list(result)
@staticmethod
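The `glob` handler above now goes through a `glob_recursive` compat helper, which presumably wraps the standard library's recursive globbing (an assumption). A plain-Python equivalent of the method, shown only for orientation:

import glob
import os

def glob_many(pathnames, root=None):
    # collect unique matches for several patterns, optionally relative to a root
    result = set()
    for pathname in pathnames:
        pattern = os.path.join(root, pathname) if root else pathname
        result |= set(glob.glob(pattern, recursive=True))
    return sorted(result)

print(glob_many(["**/*.py"], root="."))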

View File

@ -27,7 +27,13 @@ from twisted.internet import utils # pylint: disable=import-error
from platformio import __main__, __version__, fs
from platformio.commands.home import helpers
from platformio.compat import PY2, get_filesystem_encoding, is_bytes, string_types
from platformio.compat import (
PY2,
get_filesystem_encoding,
get_locale_encoding,
is_bytes,
string_types,
)
try:
from thread import get_ident as thread_get_ident
@ -51,6 +57,8 @@ class MultiThreadingStdStream(object):
def write(self, value):
thread_id = thread_get_ident()
self._ensure_thread_buffer(thread_id)
if PY2 and isinstance(value, unicode): # pylint: disable=undefined-variable
value = value.encode()
return self._buffers[thread_id].write(
value.decode() if is_bytes(value) else value
)
@ -59,8 +67,8 @@ class MultiThreadingStdStream(object):
result = ""
try:
result = self.getvalue()
self.truncate(0)
self.seek(0)
self.truncate(0)
except AttributeError:
pass
return result
@ -93,10 +101,11 @@ class PIOCoreRPC(object):
else:
args[i] = str(arg)
options = options or {}
to_json = "--json-output" in args
try:
if args and args[0] in ("account", "remote"):
if options.get("force_subprocess"):
result = yield PIOCoreRPC._call_subprocess(args, options)
defer.returnValue(PIOCoreRPC._process_result(result, to_json))
else:
@ -115,7 +124,7 @@ class PIOCoreRPC(object):
@staticmethod
def _call_inline(args, options):
PIOCoreRPC.setup_multithreading_std_streams()
cwd = (options or {}).get("cwd") or os.getcwd()
cwd = options.get("cwd") or os.getcwd()
def _thread_task():
with fs.cd(cwd):
@ -141,13 +150,15 @@ class PIOCoreRPC(object):
@staticmethod
def _process_result(result, to_json=False):
out, err, code = result
if out and is_bytes(out):
out = out.decode(get_locale_encoding())
if err and is_bytes(err):
err = err.decode(get_locale_encoding())
text = ("%s\n\n%s" % (out, err)).strip()
if code != 0:
raise Exception(text)
if not to_json:
return text
if is_bytes(out):
out = out.decode()
try:
return json.loads(out)
except ValueError as e:
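The PIOCoreRPC hunks above decode subprocess output with the locale encoding and fix the drain order of the per-thread buffer (seek before truncate). A compact sketch of such a per-thread capture stream; the class and method names here are simplified stand-ins, not the exact PlatformIO API:

import io
import threading

class PerThreadStream:
    def __init__(self):
        self._buffers = {}

    def _buffer(self):
        return self._buffers.setdefault(threading.get_ident(), io.StringIO())

    def write(self, value):
        if isinstance(value, bytes):
            value = value.decode()
        return self._buffer().write(value)

    def drain(self):
        buf = self._buffer()
        result = buf.getvalue()
        buf.seek(0)      # rewind first ...
        buf.truncate(0)  # ... then truncate, as in the fixed order above
        return result

stream = PerThreadStream()
stream.write("captured output")
print(stream.drain())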

View File

@ -25,7 +25,7 @@ from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.compat import PY2, get_filesystem_encoding
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.project.config import ProjectConfig
from platformio.project.exception import ProjectError
from platformio.project.helpers import get_project_dir, is_platformio_project
@ -105,7 +105,7 @@ class ProjectRPC(object):
return (os.path.sep).join(path.split(os.path.sep)[-2:])
result = []
pm = PlatformManager()
pm = PlatformPackageManager()
for project_dir in AppRPC.load_state()["storage"]["recentProjects"]:
if not os.path.isdir(project_dir):
continue
@ -148,8 +148,9 @@ class ProjectRPC(object):
@staticmethod
def get_project_examples():
result = []
for manifest in PlatformManager().get_installed():
examples_dir = os.path.join(manifest["__pkg_dir"], "examples")
pm = PlatformPackageManager()
for pkg in pm.get_installed():
examples_dir = os.path.join(pkg.path, "examples")
if not os.path.isdir(examples_dir):
continue
items = []
@ -172,6 +173,7 @@ class ProjectRPC(object):
"description": project_description,
}
)
manifest = pm.load_manifest(pkg)
result.append(
{
"platform": {
@ -196,7 +198,9 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
d = PIOCoreRPC.call(
args, options={"cwd": project_dir, "force_subprocess": True}
)
d.addCallback(self._generate_project_main, project_dir, framework)
return d
@ -289,7 +293,9 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": project_dir})
d = PIOCoreRPC.call(
args, options={"cwd": project_dir, "force_subprocess": True}
)
d.addCallback(self._finalize_arduino_import, project_dir, arduino_project_dir)
return d
@ -300,7 +306,7 @@ class ProjectRPC(object):
src_dir = config.get_optional_dir("src")
if os.path.isdir(src_dir):
fs.rmtree(src_dir)
shutil.copytree(arduino_project_dir, src_dir)
shutil.copytree(arduino_project_dir, src_dir, symlinks=True)
return project_dir
@staticmethod
@ -313,7 +319,7 @@ class ProjectRPC(object):
AppRPC.load_state()["storage"]["projectsDir"],
time.strftime("%y%m%d-%H%M%S-") + os.path.basename(project_dir),
)
shutil.copytree(project_dir, new_project_dir)
shutil.copytree(project_dir, new_project_dir, symlinks=True)
state = AppRPC.load_state()
args = ["init"]
@ -322,6 +328,8 @@ class ProjectRPC(object):
and state["storage"]["coreCaller"] in ProjectGenerator.get_supported_ides()
):
args.extend(["--ide", state["storage"]["coreCaller"]])
d = PIOCoreRPC.call(args, options={"cwd": new_project_dir})
d = PIOCoreRPC.call(
args, options={"cwd": new_project_dir, "force_subprocess": True}
)
d.addCallback(lambda _: new_project_dir)
return d
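Two recurring changes run through the ProjectRPC hunks above: project trees are copied with `symlinks=True` so links are preserved rather than resolved, and `pio init`/import calls go through a subprocess via the new `force_subprocess` option. A tiny demonstration of the copy behaviour (temporary paths, POSIX symlink semantics assumed):

import os
import shutil
import tempfile

src = tempfile.mkdtemp()
os.symlink("/nonexistent/target.h", os.path.join(src, "link.h"))  # dangling link
dst = os.path.join(tempfile.mkdtemp(), "copy")
shutil.copytree(src, dst, symlinks=True)
print(os.path.islink(os.path.join(dst, "link.h")))  # True: copied as a link, not followed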

View File

@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@ -18,15 +18,15 @@ import os
import time
import click
import semantic_version
from tabulate import tabulate
from platformio import exception, fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.lib.helpers import get_builtin_libs, save_project_libdeps
from platformio.compat import dump_json_to_unicode
from platformio.managers.lib import LibraryManager, get_builtin_libs, is_builtin_lib
from platformio.package.manifest.parser import ManifestParserFactory
from platformio.package.manifest.schema import ManifestSchema
from platformio.package.exception import NotGlobalLibDir, UnknownPackageError
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.meta import PackageItem, PackageSpec
from platformio.proc import is_ci
from platformio.project.config import ProjectConfig
from platformio.project.helpers import get_project_dir, is_platformio_project
@ -46,7 +46,7 @@ def get_project_global_lib_dir():
return ProjectConfig.get_instance().get_optional_dir("globallib")
@click.group(short_help="Library Manager")
@click.group(short_help="Library manager")
@click.option(
"-d",
"--storage-dir",
@ -93,7 +93,7 @@ def cli(ctx, **options):
)
if not storage_dirs:
raise exception.NotGlobalLibDir(
raise NotGlobalLibDir(
get_project_dir(), get_project_global_lib_dir(), ctx.invoked_subcommand
)
@ -125,86 +125,99 @@ def cli(ctx, **options):
@cli.command("install", short_help="Install library")
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
@click.option(
"--save",
"--save/--no-save",
is_flag=True,
help="Save installed libraries into the `platformio.ini` dependency list",
default=True,
help="Save installed libraries into the `platformio.ini` dependency list"
" (enabled by default)",
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"--interactive", is_flag=True, help="Allow to make a choice for all prompts"
"--interactive",
is_flag=True,
help="Deprecated! Please use a strict dependency specification (owner/libname)",
)
@click.option(
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
)
@click.pass_context
def lib_install( # pylint: disable=too-many-arguments
def lib_install( # pylint: disable=too-many-arguments,unused-argument
ctx, libraries, save, silent, interactive, force
):
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
storage_libdeps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, [])
installed_manifests = {}
installed_pkgs = {}
for storage_dir in storage_dirs:
if not silent and (libraries or storage_dir in storage_libdeps):
print_storage_header(storage_dirs, storage_dir)
lm = LibraryManager(storage_dir)
if libraries:
for library in libraries:
pkg_dir = lm.install(
library, silent=silent, interactive=interactive, force=force
)
installed_manifests[library] = lm.load_manifest(pkg_dir)
elif storage_dir in storage_libdeps:
builtin_lib_storages = None
for library in storage_libdeps[storage_dir]:
try:
pkg_dir = lm.install(
library, silent=silent, interactive=interactive, force=force
)
installed_manifests[library] = lm.load_manifest(pkg_dir)
except exception.LibNotFound as e:
if builtin_lib_storages is None:
builtin_lib_storages = get_builtin_libs()
if not silent or not is_builtin_lib(builtin_lib_storages, library):
click.secho("Warning! %s" % e, fg="yellow")
lm = LibraryPackageManager(storage_dir)
if not save or not libraries:
return
if libraries:
installed_pkgs = {
library: lm.install(library, silent=silent, force=force)
for library in libraries
}
elif storage_dir in storage_libdeps:
for library in storage_libdeps[storage_dir]:
lm.install(library, silent=silent, force=force)
if save and installed_pkgs:
_save_deps(ctx, installed_pkgs)
def _save_deps(ctx, pkgs, action="add"):
specs = []
for library, pkg in pkgs.items():
spec = PackageSpec(library)
if spec.external:
specs.append(spec)
else:
specs.append(
PackageSpec(
owner=pkg.metadata.spec.owner,
name=pkg.metadata.spec.name,
requirements=spec.requirements
or (
("^%s" % pkg.metadata.version)
if not pkg.metadata.version.build
else pkg.metadata.version
),
)
)
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
for input_dir in input_dirs:
config = ProjectConfig.get_instance(os.path.join(input_dir, "platformio.ini"))
config.validate(project_environments)
for env in config.envs():
if project_environments and env not in project_environments:
continue
config.expand_interpolations = False
lib_deps = config.get("env:" + env, "lib_deps", [])
for library in libraries:
if library in lib_deps:
continue
manifest = installed_manifests[library]
try:
assert library.lower() == manifest["name"].lower()
assert semantic_version.Version(manifest["version"])
lib_deps.append("{name}@^{version}".format(**manifest))
except (AssertionError, ValueError):
lib_deps.append(library)
config.set("env:" + env, "lib_deps", lib_deps)
config.save()
if not is_platformio_project(input_dir):
continue
save_project_libdeps(input_dir, specs, project_environments, action=action)
@cli.command("uninstall", short_help="Uninstall libraries")
@cli.command("uninstall", short_help="Remove libraries")
@click.argument("libraries", nargs=-1, metavar="[LIBRARY...]")
@click.option(
"--save/--no-save",
is_flag=True,
default=True,
help="Remove libraries from the `platformio.ini` dependency list and save changes"
" (enabled by default)",
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.pass_context
def lib_uninstall(ctx, libraries):
def lib_uninstall(ctx, libraries, save, silent):
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
uninstalled_pkgs = {}
for storage_dir in storage_dirs:
print_storage_header(storage_dirs, storage_dir)
lm = LibraryManager(storage_dir)
for library in libraries:
lm.uninstall(library)
lm = LibraryPackageManager(storage_dir)
uninstalled_pkgs = {
library: lm.uninstall(library, silent=silent) for library in libraries
}
if save and uninstalled_pkgs:
_save_deps(ctx, uninstalled_pkgs, action="remove")
@cli.command("update", short_help="Update installed libraries")
@ -218,42 +231,58 @@ def lib_uninstall(ctx, libraries):
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
@click.pass_context
def lib_update(ctx, libraries, only_check, dry_run, json_output):
def lib_update( # pylint: disable=too-many-arguments
ctx, libraries, only_check, dry_run, silent, json_output
):
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
only_check = dry_run or only_check
json_result = {}
for storage_dir in storage_dirs:
if not json_output:
print_storage_header(storage_dirs, storage_dir)
lm = LibraryManager(storage_dir)
_libraries = libraries
if not _libraries:
_libraries = [manifest["__pkg_dir"] for manifest in lm.get_installed()]
lib_deps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, {}).get(storage_dir, [])
lm = LibraryPackageManager(storage_dir)
_libraries = libraries or lib_deps or lm.get_installed()
if only_check and json_output:
result = []
for library in _libraries:
pkg_dir = library if os.path.isdir(library) else None
requirements = None
url = None
if not pkg_dir:
name, requirements, url = lm.parse_pkg_uri(library)
pkg_dir = lm.get_package_dir(name, requirements, url)
if not pkg_dir:
spec = None
pkg = None
if isinstance(library, PackageItem):
pkg = library
else:
spec = PackageSpec(library)
pkg = lm.get_package(spec)
if not pkg:
continue
latest = lm.outdated(pkg_dir, requirements)
if not latest:
outdated = lm.outdated(pkg, spec)
if not outdated.is_outdated(allow_incompatible=True):
continue
manifest = lm.load_manifest(pkg_dir)
manifest["versionLatest"] = latest
manifest = lm.legacy_load_manifest(pkg)
manifest["versionWanted"] = (
str(outdated.wanted) if outdated.wanted else None
)
manifest["versionLatest"] = (
str(outdated.latest) if outdated.latest else None
)
result.append(manifest)
json_result[storage_dir] = result
else:
for library in _libraries:
lm.update(library, only_check=only_check)
to_spec = (
None if isinstance(library, PackageItem) else PackageSpec(library)
)
try:
lm.update(
library, to_spec=to_spec, only_check=only_check, silent=silent
)
except UnknownPackageError as e:
if library not in lib_deps:
raise e
if json_output:
return click.echo(
@ -274,8 +303,8 @@ def lib_list(ctx, json_output):
for storage_dir in storage_dirs:
if not json_output:
print_storage_header(storage_dirs, storage_dir)
lm = LibraryManager(storage_dir)
items = lm.get_installed()
lm = LibraryPackageManager(storage_dir)
items = lm.legacy_get_installed()
if json_output:
json_result[storage_dir] = items
elif items:
@ -299,6 +328,7 @@ def lib_list(ctx, json_output):
@click.option("--json-output", is_flag=True)
@click.option("--page", type=click.INT, default=1)
@click.option("--id", multiple=True)
@click.option("-o", "--owner", multiple=True)
@click.option("-n", "--name", multiple=True)
@click.option("-a", "--author", multiple=True)
@click.option("-k", "--keyword", multiple=True)
@ -311,6 +341,7 @@ def lib_list(ctx, json_output):
help="Do not prompt, automatically paginate with delay",
)
def lib_search(query, json_output, page, noninteractive, **filters):
regclient = LibraryPackageManager().get_registry_client_instance()
if not query:
query = []
if not isinstance(query, list):
@ -320,8 +351,11 @@ def lib_search(query, json_output, page, noninteractive, **filters):
for value in values:
query.append('%s:"%s"' % (key, value))
result = util.get_api_result(
"/v2/lib/search", dict(query=" ".join(query), page=page), cache_valid="1d"
result = regclient.fetch_json_data(
"get",
"/v2/lib/search",
params=dict(query=" ".join(query), page=page),
cache_valid="1d",
)
if json_output:
@ -370,9 +404,10 @@ def lib_search(query, json_output, page, noninteractive, **filters):
time.sleep(5)
elif not click.confirm("Show next libraries?"):
break
result = util.get_api_result(
result = regclient.fetch_json_data(
"get",
"/v2/lib/search",
{"query": " ".join(query), "page": int(result["page"]) + 1},
params=dict(query=" ".join(query), page=int(result["page"]) + 1),
cache_valid="1d",
)
@ -402,23 +437,20 @@ def lib_builtin(storage, json_output):
@click.argument("library", metavar="[LIBRARY]")
@click.option("--json-output", is_flag=True)
def lib_show(library, json_output):
lm = LibraryManager()
name, requirements, _ = lm.parse_pkg_uri(library)
lib_id = lm.search_lib_id(
{"name": name, "requirements": requirements},
silent=json_output,
interactive=not json_output,
)
lib = util.get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
lm = LibraryPackageManager()
lib_id = lm.reveal_registry_package_id(library, silent=json_output)
regclient = lm.get_registry_client_instance()
lib = regclient.fetch_json_data("get", "/v2/lib/info/%d" % lib_id, cache_valid="1h")
if json_output:
return click.echo(dump_json_to_unicode(lib))
click.secho(lib["name"], fg="cyan")
click.echo("=" * len(lib["name"]))
click.secho("#ID: %d" % lib["id"], bold=True)
title = "{ownername}/{name}".format(**lib)
click.secho(title, fg="cyan")
click.echo("=" * len(title))
click.echo(lib["description"])
click.echo()
click.secho("ID: %d" % lib["id"])
click.echo(
"Version: %s, released %s"
% (
@ -441,7 +473,7 @@ def lib_show(library, json_output):
for author in lib.get("authors", []):
_data = []
for key in ("name", "email", "url", "maintainer"):
if not author[key]:
if not author.get(key):
continue
if key == "email":
_data.append("<%s>" % author[key])
@ -491,29 +523,19 @@ def lib_show(library, json_output):
return True
@cli.command("register", short_help="Register a new library")
@cli.command("register", short_help="Deprecated")
@click.argument("config_url")
def lib_register(config_url):
if not config_url.startswith("http://") and not config_url.startswith("https://"):
raise exception.InvalidLibConfURL(config_url)
# Validate manifest
ManifestSchema().load_manifest(
ManifestParserFactory.new_from_url(config_url).as_dict()
def lib_register(config_url): # pylint: disable=unused-argument
raise exception.UserSideException(
"This command is deprecated. Please use `pio package publish` command."
)
result = util.get_api_result("/lib/register", data=dict(config_url=config_url))
if "message" in result and result["message"]:
click.secho(
result["message"],
fg="green" if "successed" in result and result["successed"] else "red",
)
@cli.command("stats", short_help="Library Registry Statistics")
@click.option("--json-output", is_flag=True)
def lib_stats(json_output):
result = util.get_api_result("/lib/stats", cache_valid="1h")
regclient = LibraryPackageManager().get_registry_client_instance()
result = regclient.fetch_json_data("get", "/v2/lib/stats", cache_valid="1h")
if json_output:
return click.echo(dump_json_to_unicode(result))
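The reworked `lib install` above records dependencies through `_save_deps`, pinning registry packages as `owner/name@^X.Y.Z` unless the version carries build metadata, in which case the exact version is kept. An isolated sketch of that rule using the third-party semantic_version package (the function name is made up):

import semantic_version

def as_dependency(owner, name, version):
    ver = semantic_version.Version(version)
    # caret range by default, exact version when build metadata is present
    requirement = str(ver) if ver.build else "^%s" % ver
    return "%s/%s@%s" % (owner, name, requirement)

print(as_dependency("bblanchon", "ArduinoJson", "6.17.0"))        # ...@^6.17.0
print(as_dependency("platformio", "tool-x", "1.0.0+sha.9f2b0d"))  # exact version kept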

View File

@ -0,0 +1,102 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from platformio.compat import ci_strings_are_equal
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.meta import PackageSpec
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.exception import InvalidProjectConfError
def get_builtin_libs(storage_names=None):
# pylint: disable=import-outside-toplevel
from platformio.package.manager.library import LibraryPackageManager
items = []
storage_names = storage_names or []
pm = PlatformPackageManager()
for pkg in pm.get_installed():
p = PlatformFactory.new(pkg)
for storage in p.get_lib_storages():
if storage_names and storage["name"] not in storage_names:
continue
lm = LibraryPackageManager(storage["path"])
items.append(
{
"name": storage["name"],
"path": storage["path"],
"items": lm.legacy_get_installed(),
}
)
return items
def is_builtin_lib(name, storages=None):
for storage in storages or get_builtin_libs():
for lib in storage["items"]:
if lib.get("name") == name:
return True
return False
def ignore_deps_by_specs(deps, specs):
result = []
for dep in deps:
depspec = PackageSpec(dep)
if depspec.external:
result.append(dep)
continue
ignore_conditions = []
for spec in specs:
if depspec.owner:
ignore_conditions.append(
ci_strings_are_equal(depspec.owner, spec.owner)
and ci_strings_are_equal(depspec.name, spec.name)
)
else:
ignore_conditions.append(ci_strings_are_equal(depspec.name, spec.name))
if not any(ignore_conditions):
result.append(dep)
return result
def save_project_libdeps(project_dir, specs, environments=None, action="add"):
config = ProjectConfig.get_instance(os.path.join(project_dir, "platformio.ini"))
config.validate(environments)
for env in config.envs():
if environments and env not in environments:
continue
config.expand_interpolations = False
candidates = []
try:
candidates = ignore_deps_by_specs(
config.get("env:" + env, "lib_deps"), specs
)
except InvalidProjectConfError:
pass
if action == "add":
candidates.extend(spec.as_dependency() for spec in specs)
if candidates:
result = []
for item in candidates:
item = item.strip()
if item and item not in result:
result.append(item)
config.set("env:" + env, "lib_deps", result)
elif config.has_option("env:" + env, "lib_deps"):
config.remove_option("env:" + env, "lib_deps")
config.save()
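`save_project_libdeps` above rebuilds each environment's `lib_deps`: entries matching the new specs are dropped, the new specs are appended (for the "add" action), and the list is de-duplicated while preserving order. A simplified string-based sketch of that merge (the real code compares parsed `PackageSpec` objects, including owners):

def lib_name(dep):
    return dep.split("@")[0].split("/")[-1].strip().lower()

def merge_lib_deps(existing, new_specs):
    new_names = {lib_name(spec) for spec in new_specs}
    kept = [dep for dep in existing if lib_name(dep) not in new_names]
    result = []
    for item in kept + new_specs:
        item = item.strip()
        if item and item not in result:
            result.append(item)
    return result

print(merge_lib_deps(["ArduinoJson@^5", "Wire"], ["bblanchon/ArduinoJson@^6.17.0"]))
# ['Wire', 'bblanchon/ArduinoJson@^6.17.0']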

165 platformio/commands/org.py Normal file
View File

@ -0,0 +1,165 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import click
from tabulate import tabulate
from platformio.clients.account import AccountClient
from platformio.commands.account import validate_email, validate_username
@click.group("org", short_help="Manage organizations")
def cli():
pass
def validate_orgname(value):
return validate_username(value, "Organization name")
@cli.command("create", short_help="Create a new organization")
@click.argument(
"orgname",
callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
"--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option(
"--displayname",
)
def org_create(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
return click.secho(
"The organization `%s` has been successfully created." % orgname,
fg="green",
)
@cli.command("list", short_help="List organizations and their members")
@click.option("--json-output", is_flag=True)
def org_list(json_output):
client = AccountClient()
orgs = client.list_orgs()
if json_output:
return click.echo(json.dumps(orgs))
if not orgs:
return click.echo("You do not have any organization")
for org in orgs:
click.echo()
click.secho(org.get("orgname"), fg="cyan")
click.echo("-" * len(org.get("orgname")))
data = []
if org.get("displayname"):
data.append(("Display Name:", org.get("displayname")))
if org.get("email"):
data.append(("Email:", org.get("email")))
data.append(
(
"Owners:",
", ".join((owner.get("username") for owner in org.get("owners"))),
)
)
click.echo(tabulate(data, tablefmt="plain"))
return click.echo()
@cli.command("update", short_help="Update organization")
@click.argument("cur_orgname")
@click.option(
"--orgname",
callback=lambda _, __, value: validate_orgname(value),
help="A new orgname",
)
@click.option("--email")
@click.option("--displayname")
def org_update(cur_orgname, **kwargs):
client = AccountClient()
org = client.get_org(cur_orgname)
del org["owners"]
new_org = org.copy()
if not any(kwargs.values()):
for field in org:
new_org[field] = click.prompt(
field.replace("_", " ").capitalize(), default=org[field]
)
if field == "email":
validate_email(new_org[field])
if field == "orgname":
validate_orgname(new_org[field])
else:
new_org.update(
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
)
client.update_org(cur_orgname, new_org)
return click.secho(
"The organization `%s` has been successfully updated." % cur_orgname,
fg="green",
)
@cli.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def account_destroy(orgname):
client = AccountClient()
click.confirm(
"Are you sure you want to delete the `%s` organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
return click.secho(
"Organization `%s` has been destroyed." % orgname,
fg="green",
)
@cli.command("add", short_help="Add a new owner to organization")
@click.argument(
"orgname",
)
@click.argument(
"username",
)
def org_add_owner(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
return click.secho(
"The new owner `%s` has been successfully added to the `%s` organization."
% (username, orgname),
fg="green",
)
@cli.command("remove", short_help="Remove an owner from organization")
@click.argument(
"orgname",
)
@click.argument(
"username",
)
def org_remove_owner(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
return click.secho(
"The `%s` owner has been successfully removed from the `%s` organization."
% (username, orgname),
fg="green",
)

View File

@ -0,0 +1,119 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
from datetime import datetime
import click
from platformio import fs
from platformio.clients.registry import RegistryClient
from platformio.compat import ensure_python3
from platformio.package.meta import PackageSpec, PackageType
from platformio.package.pack import PackagePacker
def validate_datetime(ctx, param, value): # pylint: disable=unused-argument
if not value:
return value
try:
datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
except ValueError as e:
raise click.BadParameter(e)
return value
@click.group("package", short_help="Package manager")
def cli():
pass
@cli.command("pack", short_help="Create a tarball from a package")
@click.argument(
"package",
required=True,
default=os.getcwd,
metavar="<source directory, tar.gz or zip>",
)
@click.option(
"-o", "--output", help="A destination path (folder or a full path to file)"
)
def package_pack(package, output):
p = PackagePacker(package)
archive_path = p.pack(output)
click.secho('Wrote a tarball to "%s"' % archive_path, fg="green")
@cli.command("publish", short_help="Publish a package to the registry")
@click.argument(
"package",
required=True,
default=os.getcwd,
metavar="<source directory, tar.gz or zip>",
)
@click.option(
"--owner",
help="PIO Account username (can be organization username). "
"Default is set to a username of the authorized PIO Account",
)
@click.option(
"--released-at",
callback=validate_datetime,
help="Custom release date and time in the next format (UTC): 2014-06-13 17:08:52",
)
@click.option("--private", is_flag=True, help="Restricted access (not a public)")
@click.option(
"--notify/--no-notify",
default=True,
help="Notify by email when package is processed",
)
def package_publish(package, owner, released_at, private, notify):
assert ensure_python3()
with tempfile.TemporaryDirectory() as tmp_dir: # pylint: disable=no-member
with fs.cd(tmp_dir):
p = PackagePacker(package)
archive_path = p.pack()
response = RegistryClient().publish_package(
archive_path, owner, released_at, private, notify
)
os.remove(archive_path)
click.secho(response.get("message"), fg="green")
@cli.command("unpublish", short_help="Remove a pushed package from the registry")
@click.argument(
"package", required=True, metavar="[<organization>/]<pkgname>[@<version>]"
)
@click.option(
"--type",
type=click.Choice(list(PackageType.items().values())),
default="library",
help="Package type, default is set to `library`",
)
@click.option(
"--undo",
is_flag=True,
help="Undo a remove, putting a version back into the registry",
)
def package_unpublish(package, type, undo): # pylint: disable=redefined-builtin
spec = PackageSpec(package)
response = RegistryClient().unpublish_package(
type=type,
name=spec.name,
owner=spec.owner,
version=str(spec.requirements),
undo=undo,
)
click.secho(response.get("message"), fg="green")

View File

@ -12,18 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import dirname, isdir
import os
import click
from platformio import app, exception, util
from platformio.cache import cleanup_content_cache
from platformio.commands.boards import print_boards
from platformio.compat import dump_json_to_unicode
from platformio.managers.platform import PlatformFactory, PlatformManager
from platformio.package.pack import PackagePacker
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.meta import PackageItem, PackageSpec
from platformio.package.version import get_original_version
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
@click.group(short_help="Platform Manager")
@click.group(short_help="Platform manager")
def cli():
pass
@ -47,7 +50,7 @@ def _print_platforms(platforms):
if "version" in platform:
if "__src_url" in platform:
click.echo(
"Version: #%s (%s)" % (platform["version"], platform["__src_url"])
"Version: %s (%s)" % (platform["version"], platform["__src_url"])
)
else:
click.echo("Version: " + platform["version"])
@ -55,31 +58,27 @@ def _print_platforms(platforms):
def _get_registry_platforms():
platforms = util.get_api_result("/platforms", cache_valid="7d")
pm = PlatformManager()
for platform in platforms or []:
platform["versions"] = pm.get_all_repo_versions(platform["name"])
return platforms
regclient = PlatformPackageManager().get_registry_client_instance()
return regclient.fetch_json_data("get", "/v2/platforms", cache_valid="1d")
def _get_platform_data(*args, **kwargs):
try:
return _get_installed_platform_data(*args, **kwargs)
except exception.UnknownPlatform:
except UnknownPlatform:
return _get_registry_platform_data(*args, **kwargs)
def _get_installed_platform_data(platform, with_boards=True, expose_packages=True):
p = PlatformFactory.newPlatform(platform)
p = PlatformFactory.new(platform)
data = dict(
name=p.name,
title=p.title,
description=p.description,
version=p.version,
homepage=p.homepage,
url=p.homepage,
repository=p.repository_url,
url=p.vendor_url,
docs=p.docs_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
@ -91,7 +90,9 @@ def _get_installed_platform_data(platform, with_boards=True, expose_packages=Tru
# return data
# overwrite VCS version and add extra fields
manifest = PlatformManager().load_manifest(dirname(p.manifest_path))
manifest = PlatformPackageManager().legacy_load_manifest(
os.path.dirname(p.manifest_path)
)
assert manifest
for key in manifest:
if key == "version" or key.startswith("__"):
@ -104,13 +105,15 @@ def _get_installed_platform_data(platform, with_boards=True, expose_packages=Tru
return data
data["packages"] = []
installed_pkgs = p.get_installed_packages()
for name, opts in p.packages.items():
installed_pkgs = {
pkg.metadata.name: p.pm.load_manifest(pkg) for pkg in p.get_installed_packages()
}
for name, options in p.packages.items():
item = dict(
name=name,
type=p.get_package_type(name),
requirements=opts.get("version"),
optional=opts.get("optional") is True,
requirements=options.get("version"),
optional=options.get("optional") is True,
)
if name in installed_pkgs:
for key, value in installed_pkgs[name].items():
@ -118,7 +121,7 @@ def _get_installed_platform_data(platform, with_boards=True, expose_packages=Tru
continue
item[key] = value
if key == "version":
item["originalVersion"] = util.get_original_version(value)
item["originalVersion"] = get_original_version(value)
data["packages"].append(item)
return data
@ -137,6 +140,7 @@ def _get_registry_platform_data( # pylint: disable=unused-argument
return None
data = dict(
ownername=_data.get("ownername"),
name=_data["name"],
title=_data["title"],
description=_data["description"],
@ -147,13 +151,13 @@ def _get_registry_platform_data( # pylint: disable=unused-argument
forDesktop=_data["forDesktop"],
frameworks=_data["frameworks"],
packages=_data["packages"],
versions=_data["versions"],
versions=_data.get("versions"),
)
if with_boards:
data["boards"] = [
board
for board in PlatformManager().get_registered_boards()
for board in PlatformPackageManager().get_registered_boards()
if board["platform"] == _data["name"]
]
@ -187,8 +191,11 @@ def platform_search(query, json_output):
@click.argument("query", required=False)
@click.option("--json-output", is_flag=True)
def platform_frameworks(query, json_output):
regclient = PlatformPackageManager().get_registry_client_instance()
frameworks = []
for framework in util.get_api_result("/frameworks", cache_valid="7d"):
for framework in regclient.fetch_json_data(
"get", "/v2/frameworks", cache_valid="1d"
):
if query == "all":
query = ""
search_data = dump_json_to_unicode(framework)
@ -213,12 +220,10 @@ def platform_frameworks(query, json_output):
@click.option("--json-output", is_flag=True)
def platform_list(json_output):
platforms = []
pm = PlatformManager()
for manifest in pm.get_installed():
pm = PlatformPackageManager()
for pkg in pm.get_installed():
platforms.append(
_get_installed_platform_data(
manifest["__pkg_dir"], with_boards=False, expose_packages=False
)
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
)
platforms = sorted(platforms, key=lambda manifest: manifest["name"])
@ -234,16 +239,15 @@ def platform_list(json_output):
def platform_show(platform, json_output): # pylint: disable=too-many-branches
data = _get_platform_data(platform)
if not data:
raise exception.UnknownPlatform(platform)
raise UnknownPlatform(platform)
if json_output:
return click.echo(dump_json_to_unicode(data))
dep = "{ownername}/{name}".format(**data) if "ownername" in data else data["name"]
click.echo(
"{name} ~ {title}".format(
name=click.style(data["name"], fg="cyan"), title=data["title"]
)
"{dep} ~ {title}".format(dep=click.style(dep, fg="cyan"), title=data["title"])
)
click.echo("=" * (3 + len(data["name"] + data["title"])))
click.echo("=" * (3 + len(dep + data["title"])))
click.echo(data["description"])
click.echo()
if "version" in data:
@ -300,6 +304,7 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
@click.option("--without-package", multiple=True)
@click.option("--skip-default-package", is_flag=True)
@click.option("--with-all-packages", is_flag=True)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"-f",
"--force",
@ -312,21 +317,24 @@ def platform_install( # pylint: disable=too-many-arguments
without_package,
skip_default_package,
with_all_packages,
silent,
force,
):
pm = PlatformManager()
pm = PlatformPackageManager()
for platform in platforms:
if pm.install(
name=platform,
pkg = pm.install(
spec=platform,
with_packages=with_package,
without_packages=without_package,
skip_default_package=skip_default_package,
with_all_packages=with_all_packages,
silent=silent,
force=force,
):
)
if pkg and not silent:
click.secho(
"The platform '%s' has been successfully installed!\n"
"The rest of packages will be installed automatically "
"The rest of the packages will be installed later "
"depending on your build environment." % platform,
fg="green",
)
@ -335,11 +343,11 @@ def platform_install( # pylint: disable=too-many-arguments
@cli.command("uninstall", short_help="Uninstall development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
def platform_uninstall(platforms):
pm = PlatformManager()
pm = PlatformPackageManager()
for platform in platforms:
if pm.uninstall(platform):
click.secho(
"The platform '%s' has been successfully uninstalled!" % platform,
"The platform '%s' has been successfully removed!" % platform,
fg="green",
)
@ -358,66 +366,60 @@ def platform_uninstall(platforms):
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
def platform_update( # pylint: disable=too-many-locals
platforms, only_packages, only_check, dry_run, json_output
def platform_update( # pylint: disable=too-many-locals, too-many-arguments
platforms, only_packages, only_check, dry_run, silent, json_output
):
pm = PlatformManager()
pkg_dir_to_name = {}
if not platforms:
platforms = []
for manifest in pm.get_installed():
platforms.append(manifest["__pkg_dir"])
pkg_dir_to_name[manifest["__pkg_dir"]] = manifest.get(
"title", manifest["name"]
)
pm = PlatformPackageManager()
platforms = platforms or pm.get_installed()
only_check = dry_run or only_check
if only_check and json_output:
result = []
for platform in platforms:
pkg_dir = platform if isdir(platform) else None
requirements = None
url = None
if not pkg_dir:
name, requirements, url = pm.parse_pkg_uri(platform)
pkg_dir = pm.get_package_dir(name, requirements, url)
if not pkg_dir:
spec = None
pkg = None
if isinstance(platform, PackageItem):
pkg = platform
else:
spec = PackageSpec(platform)
pkg = pm.get_package(spec)
if not pkg:
continue
latest = pm.outdated(pkg_dir, requirements)
outdated = pm.outdated(pkg, spec)
if (
not latest
and not PlatformFactory.newPlatform(pkg_dir).are_outdated_packages()
not outdated.is_outdated(allow_incompatible=True)
and not PlatformFactory.new(pkg).are_outdated_packages()
):
continue
data = _get_installed_platform_data(
pkg_dir, with_boards=False, expose_packages=False
pkg, with_boards=False, expose_packages=False
)
if latest:
data["versionLatest"] = latest
if outdated.is_outdated(allow_incompatible=True):
data["versionLatest"] = (
str(outdated.latest) if outdated.latest else None
)
result.append(data)
return click.echo(dump_json_to_unicode(result))
# cleanup cached board and platform lists
app.clean_cache()
cleanup_content_cache("http")
for platform in platforms:
click.echo(
"Platform %s"
% click.style(pkg_dir_to_name.get(platform, platform), fg="cyan")
% click.style(
platform.metadata.name
if isinstance(platform, PackageItem)
else platform,
fg="cyan",
)
)
click.echo("--------")
pm.update(platform, only_packages=only_packages, only_check=only_check)
pm.update(
platform, only_packages=only_packages, only_check=only_check, silent=silent
)
click.echo()
return True
@cli.command(
"pack", short_help="Create a tarball from development platform/tool package"
)
@click.argument("package", required=True, metavar="[source directory, tar.gz or zip]")
def platform_pack(package):
p = PackagePacker(package)
tarball_path = p.pack()
click.secho('Wrote a tarball to "%s"' % tarball_path, fg="green")
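The update hunks above replace the single "latest" string with an outdated record that distinguishes the newest compatible version ("wanted") from the absolute newest ("latest"). A hypothetical illustration of that split with the semantic_version package, treating "compatible" as same-major; the real `outdated()` lives in the new package manager and is richer than this:

import semantic_version

def outdated(current, available):
    cur = semantic_version.Version(current)
    versions = sorted(semantic_version.Version(v) for v in available)
    wanted = max((v for v in versions if v.major == cur.major), default=None)
    latest = versions[-1] if versions else None
    return {
        "versionWanted": str(wanted) if wanted and wanted > cur else None,
        "versionLatest": str(latest) if latest and latest > cur else None,
    }

print(outdated("2.0.0", ["2.0.0", "2.1.0", "3.0.0"]))
# {'versionWanted': '2.1.0', 'versionLatest': '3.0.0'}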

View File

@ -12,23 +12,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments,too-many-locals, too-many-branches
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,line-too-long
import json
import os
import click
from tabulate import tabulate
from platformio import exception, fs
from platformio import fs
from platformio.commands.platform import platform_install as cli_platform_install
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.platform.exception import UnknownBoard
from platformio.project.config import ProjectConfig
from platformio.project.exception import NotPlatformIOProjectError
from platformio.project.helpers import is_platformio_project
from platformio.project.helpers import is_platformio_project, load_project_ide_data
@click.group(short_help="Project Manager")
@click.group(short_help="Project manager")
def cli():
pass
@ -38,9 +40,7 @@ def cli():
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
)
@click.option("--json-output", is_flag=True)
def project_config(project_dir, json_output):
@ -54,7 +54,6 @@ def project_config(project_dir, json_output):
"Computed project configuration for %s" % click.style(project_dir, fg="cyan")
)
for section, options in config.as_tuple():
click.echo()
click.secho(section, fg="cyan")
click.echo("-" * len(section))
click.echo(
@ -66,15 +65,55 @@ def project_config(project_dir, json_output):
tablefmt="plain",
)
)
click.echo()
return None
@cli.command("data", short_help="Dump data intended for IDE extensions/plugins")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
)
@click.option("-e", "--environment", multiple=True)
@click.option("--json-output", is_flag=True)
def project_data(project_dir, environment, json_output):
if not is_platformio_project(project_dir):
raise NotPlatformIOProjectError(project_dir)
with fs.cd(project_dir):
config = ProjectConfig.get_instance()
config.validate(environment)
environment = list(environment or config.envs())
if json_output:
return click.echo(json.dumps(load_project_ide_data(project_dir, environment)))
for envname in environment:
click.echo("Environment: " + click.style(envname, fg="cyan", bold=True))
click.echo("=" * (13 + len(envname)))
click.echo(
tabulate(
[
(click.style(name, bold=True), "=", json.dumps(value, indent=2))
for name, value in load_project_ide_data(
project_dir, envname
).items()
],
tablefmt="plain",
)
)
click.echo()
return None
def validate_boards(ctx, param, value): # pylint: disable=W0613
pm = PlatformManager()
pm = PlatformPackageManager()
for id_ in value:
try:
pm.board_config(id_)
except exception.UnknownBoard:
except UnknownBoard:
raise click.BadParameter(
"`%s`. Please search for board ID using `platformio boards` "
"command" % id_
@@ -93,6 +132,7 @@ def validate_boards(ctx, param, value): # pylint: disable=W0613
)
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option("--ide", type=click.Choice(ProjectGenerator.get_supported_ides()))
@click.option("-e", "--environment", help="Update using existing environment")
@click.option("-O", "--project-option", multiple=True)
@click.option("--env-prefix", default="")
@click.option("-s", "--silent", is_flag=True)
@@ -102,6 +142,7 @@ def project_init(
project_dir,
board,
ide,
environment,
project_option,
env_prefix,
silent,
@@ -133,17 +174,25 @@ def project_init(
if is_new_project:
init_base_project(project_dir)
if board:
fill_project_envs(
if environment:
update_project_env(project_dir, environment, project_option)
elif board:
update_board_envs(
ctx, project_dir, board, project_option, env_prefix, ide is not None
)
if ide:
pg = ProjectGenerator(project_dir, ide, board)
with fs.cd(project_dir):
config = ProjectConfig.get_instance(
os.path.join(project_dir, "platformio.ini")
)
config.validate()
pg = ProjectGenerator(
config, environment or get_best_envname(config, board), ide
)
pg.generate()
if is_new_project:
init_ci_conf(project_dir)
init_cvs_ignore(project_dir)
if silent:
@@ -233,7 +282,6 @@ https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
def init_lib_readme(lib_dir):
# pylint: disable=line-too-long
with open(os.path.join(lib_dir, "README"), "w") as fp:
fp.write(
"""
@@ -290,7 +338,7 @@ def init_test_readme(test_dir):
with open(os.path.join(test_dir, "README"), "w") as fp:
fp.write(
"""
This directory is intended for PIO Unit Testing and project tests.
This directory is intended for PlatformIO Unit Testing and project tests.
Unit Testing is a software testing method by which individual units of
source code, sets of one or more MCU program modules together with associated
@@ -298,89 +346,12 @@ control data, usage procedures, and operating procedures, are tested to
determine whether they are fit for use. Unit testing finds problems early
in the development cycle.
More information about PIO Unit Testing:
More information about PlatformIO Unit Testing:
- https://docs.platformio.org/page/plus/unit-testing.html
""",
)
def init_ci_conf(project_dir):
conf_path = os.path.join(project_dir, ".travis.yml")
if os.path.isfile(conf_path):
return
with open(conf_path, "w") as fp:
fp.write(
"""# Continuous Integration (CI) is the practice, in software
# engineering, of merging all developer working copies with a shared mainline
# several times a day < https://docs.platformio.org/page/ci/index.html >
#
# Documentation:
#
# * Travis CI Embedded Builds with PlatformIO
# < https://docs.travis-ci.com/user/integration/platformio/ >
#
# * PlatformIO integration with Travis CI
# < https://docs.platformio.org/page/ci/travis.html >
#
# * User Guide for `platformio ci` command
# < https://docs.platformio.org/page/userguide/cmd_ci.html >
#
#
# Please choose one of the following templates (proposed below) and uncomment
# it (remove "# " before each line) or use own configuration according to the
# Travis CI documentation (see above).
#
#
# Template #1: General project. Test it using existing `platformio.ini`.
#
# language: python
# python:
# - "2.7"
#
# sudo: false
# cache:
# directories:
# - "~/.platformio"
#
# install:
# - pip install -U platformio
# - platformio update
#
# script:
# - platformio run
#
# Template #2: The project is intended to be used as a library with examples.
#
# language: python
# python:
# - "2.7"
#
# sudo: false
# cache:
# directories:
# - "~/.platformio"
#
# env:
# - PLATFORMIO_CI_SRC=path/to/test/file.c
# - PLATFORMIO_CI_SRC=examples/file.ino
# - PLATFORMIO_CI_SRC=path/to/test/directory
#
# install:
# - pip install -U platformio
# - platformio update
#
# script:
# - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N
""",
)
def init_cvs_ignore(project_dir):
conf_path = os.path.join(project_dir, ".gitignore")
if os.path.isfile(conf_path):
@@ -389,7 +360,7 @@ def init_cvs_ignore(project_dir):
fp.write(".pio\n")
def fill_project_envs(
def update_board_envs(
ctx, project_dir, board_ids, project_option, env_prefix, force_download
):
config = ProjectConfig(
@@ -401,7 +372,7 @@ def fill_project_envs(
if all(cond):
used_boards.append(config.get(section, "board"))
pm = PlatformManager()
pm = PlatformPackageManager()
used_platforms = []
modified = False
for id_ in board_ids:
@@ -438,9 +409,51 @@ def fill_project_envs(
def _install_dependent_platforms(ctx, platforms):
installed_platforms = [p["name"] for p in PlatformManager().get_installed()]
installed_platforms = [
pkg.metadata.name for pkg in PlatformPackageManager().get_installed()
]
if set(platforms) <= set(installed_platforms):
return
ctx.invoke(
cli_platform_install, platforms=list(set(platforms) - set(installed_platforms))
)
def update_project_env(project_dir, environment, project_option):
if not project_option:
return
config = ProjectConfig(
os.path.join(project_dir, "platformio.ini"), parse_extra=False
)
section = "env:%s" % environment
if not config.has_section(section):
config.add_section(section)
for item in project_option:
if "=" not in item:
continue
_name, _value = item.split("=", 1)
config.set(section, _name.strip(), _value.strip())
config.save()
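A small sketch of how update_project_env() above merges -O "name=value" pairs into the selected [env:...] section; the import path, option names, and temp-project setup are illustrative assumptions:

import os
import tempfile

from platformio.commands.project import update_project_env  # assumed import path

project_dir = tempfile.mkdtemp()
with open(os.path.join(project_dir, "platformio.ini"), "w") as fp:
    fp.write("[env:uno]\nboard = uno\n")

# merge two project options into [env:uno]
update_project_env(project_dir, "uno", ["monitor_speed=115200", "upload_speed=921600"])
with open(os.path.join(project_dir, "platformio.ini")) as fp:
    print(fp.read())  # now also contains monitor_speed and upload_speed under [env:uno]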
def get_best_envname(config, board_ids=None):
envname = None
default_envs = config.default_envs()
if default_envs:
envname = default_envs[0]
if not board_ids:
return envname
for env in config.envs():
if not board_ids:
return env
if not envname:
envname = env
items = config.items(env=env, as_dict=True)
if "board" in items and items.get("board") in board_ids:
return env
return envname
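A short sketch of the selection logic in get_best_envname() above: a configured default_envs wins unless one of the given board IDs matches an environment's "board" option. Import paths and the temporary project are assumptions for illustration:

import os
import tempfile

from platformio.commands.project import get_best_envname  # assumed import path
from platformio.project.config import ProjectConfig

project_dir = tempfile.mkdtemp()
with open(os.path.join(project_dir, "platformio.ini"), "w") as fp:
    fp.write(
        "[platformio]\ndefault_envs = release\n\n"
        "[env:release]\nboard = uno\n\n"
        "[env:debug]\nboard = nodemcuv2\n"
    )

config = ProjectConfig(os.path.join(project_dir, "platformio.ini"))
print(get_best_envname(config))                           # "release" (first default env)
print(get_best_envname(config, board_ids=["nodemcuv2"]))  # "debug" (board match wins)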


@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,91 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
class AsyncCommandBase(object):
MAX_BUFFER_SIZE = 1024 * 1024 # 1Mb
def __init__(self, options=None, on_end_callback=None):
self.options = options or {}
self.on_end_callback = on_end_callback
self._buffer = b""
self._return_code = None
self._d = None
self._paused = False
try:
self.start()
except Exception as e:
raise pb.Error(str(e))
@property
def id(self):
return id(self)
def pause(self):
self._paused = True
self.stop()
def unpause(self):
self._paused = False
self.start()
def start(self):
raise NotImplementedError
def stop(self):
self.transport.loseConnection() # pylint: disable=no-member
def _ac_ended(self):
if self.on_end_callback:
self.on_end_callback()
if not self._d or self._d.called:
self._d = None
return
if self._buffer:
self._d.callback(self._buffer)
else:
self._d.callback(None)
def _ac_ondata(self, data):
self._buffer += data
if len(self._buffer) > self.MAX_BUFFER_SIZE:
self._buffer = self._buffer[-1 * self.MAX_BUFFER_SIZE :]
if self._paused:
return
if self._d and not self._d.called:
self._d.callback(self._buffer)
self._buffer = b""
def ac_read(self):
if self._buffer:
result = self._buffer
self._buffer = b""
return result
if self._return_code is None:
self._d = defer.Deferred()
return self._d
return None
def ac_write(self, data):
self.transport.write(data) # pylint: disable=no-member
return len(data)
def ac_close(self):
self.stop()
return self._return_code
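A tiny sketch of the buffer/deferred contract implemented above: a subclass only has to provide start()/stop() and push bytes through _ac_ondata(). The subclass and payload below are illustrative only, and Twisted must be installed for the base module to import:

from platformio.commands.remote.ac.base import AsyncCommandBase  # module shown above


class EchoAsyncCmd(AsyncCommandBase):
    def start(self):
        # pretend a transport delivered some output
        self._ac_ondata(b"hello\n")

    def stop(self):
        self._return_code = 0
        self._ac_ended()


cmd = EchoAsyncCmd()
print(cmd.ac_read())   # b"hello\n" is returned straight from the internal buffer
print(cmd.ac_close())  # 0, recorded by stop() when the command is closed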


@@ -0,0 +1,42 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from twisted.internet import protocol, reactor # pylint: disable=import-error
from platformio.commands.remote.ac.base import AsyncCommandBase
class ProcessAsyncCmd(protocol.ProcessProtocol, AsyncCommandBase):
def start(self):
env = dict(os.environ).copy()
env.update({"PLATFORMIO_FORCE_ANSI": "true"})
reactor.spawnProcess(
self, self.options["executable"], self.options["args"], env
)
def outReceived(self, data):
self._ac_ondata(data)
def errReceived(self, data):
self._ac_ondata(data)
def processExited(self, reason):
self._return_code = reason.value.exitCode
def processEnded(self, reason):
if self._return_code is None:
self._return_code = reason.value.exitCode
self._ac_ended()


@@ -0,0 +1,66 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import zlib
from io import BytesIO
from platformio.commands.remote.ac.base import AsyncCommandBase
from platformio.commands.remote.projectsync import PROJECT_SYNC_STAGE, ProjectSync
class ProjectSyncAsyncCmd(AsyncCommandBase):
def __init__(self, *args, **kwargs):
self.psync = None
self._upstream = None
super(ProjectSyncAsyncCmd, self).__init__(*args, **kwargs)
def start(self):
project_dir = os.path.join(
self.options["agent_working_dir"], "projects", self.options["id"]
)
self.psync = ProjectSync(project_dir)
for name in self.options["items"]:
self.psync.add_item(os.path.join(project_dir, name), name)
def stop(self):
self.psync = None
self._upstream = None
self._return_code = PROJECT_SYNC_STAGE.COMPLETED.value
def ac_write(self, data):
stage = PROJECT_SYNC_STAGE.lookupByValue(data.get("stage"))
if stage is PROJECT_SYNC_STAGE.DBINDEX:
self.psync.rebuild_dbindex()
return zlib.compress(json.dumps(self.psync.get_dbindex()).encode())
if stage is PROJECT_SYNC_STAGE.DELETE:
return self.psync.delete_dbindex(
json.loads(zlib.decompress(data["dbindex"]))
)
if stage is PROJECT_SYNC_STAGE.UPLOAD:
if not self._upstream:
self._upstream = BytesIO()
self._upstream.write(data["chunk"])
if self._upstream.tell() == data["total"]:
self.psync.decompress_items(self._upstream)
self._upstream = None
return PROJECT_SYNC_STAGE.EXTRACTED.value
return PROJECT_SYNC_STAGE.UPLOAD.value
return None
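A standalone sketch of the UPLOAD accounting used above: the agent keeps writing chunks into a BytesIO and reports EXTRACTED only once the number of received bytes equals the announced total. The payload and chunk size below are made-up values:

from io import BytesIO

payload = b"x" * 1000            # stand-in for the compressed project archive
CHUNK = 256
upstream, sent, stage = BytesIO(), 0, "UPLOAD"
while sent < len(payload):
    chunk = payload[sent:sent + CHUNK]
    upstream.write(chunk)
    sent += len(chunk)
    stage = "EXTRACTED" if upstream.tell() == len(payload) else "UPLOAD"
print(stage)                     # "EXTRACTED" after the final chunk arrives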


@@ -0,0 +1,60 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from time import sleep
from twisted.internet import protocol, reactor # pylint: disable=import-error
from twisted.internet.serialport import SerialPort # pylint: disable=import-error
from platformio.commands.remote.ac.base import AsyncCommandBase
class SerialPortAsyncCmd(protocol.Protocol, AsyncCommandBase):
def start(self):
SerialPort(
self,
reactor=reactor,
**{
"deviceNameOrPortNumber": self.options["port"],
"baudrate": self.options["baud"],
"parity": self.options["parity"],
"rtscts": 1 if self.options["rtscts"] else 0,
"xonxoff": 1 if self.options["xonxoff"] else 0,
}
)
def connectionMade(self):
self.reset_device()
if self.options.get("rts", None) is not None:
self.transport.setRTS(self.options.get("rts"))
if self.options.get("dtr", None) is not None:
self.transport.setDTR(self.options.get("dtr"))
def reset_device(self):
self.transport.flushInput()
self.transport.setDTR(False)
self.transport.setRTS(False)
sleep(0.1)
self.transport.setDTR(True)
self.transport.setRTS(True)
sleep(0.1)
def dataReceived(self, data):
self._ac_ondata(data)
def connectionLost(self, reason): # pylint: disable=unused-argument
if self._paused:
return
self._return_code = 0
self._ac_ended()


@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,38 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import click
from platformio.commands.remote.client.base import RemoteClientBase
class AgentListClient(RemoteClientBase):
def agent_pool_ready(self):
d = self.agentpool.callRemote("list", True)
d.addCallback(self._cbResult)
d.addErrback(self.cb_global_error)
def _cbResult(self, result):
for item in result:
click.secho(item["name"], fg="cyan")
click.echo("-" * len(item["name"]))
click.echo("ID: %s" % item["id"])
click.echo(
"Started: %s"
% datetime.fromtimestamp(item["started"]).strftime("%Y-%m-%d %H:%M:%S")
)
click.echo("")
self.disconnect()


@@ -0,0 +1,222 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from os.path import getatime, getmtime, isdir, isfile, join
from twisted.logger import LogLevel # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from platformio import proc, util
from platformio.commands.remote.ac.process import ProcessAsyncCmd
from platformio.commands.remote.ac.psync import ProjectSyncAsyncCmd
from platformio.commands.remote.ac.serial import SerialPortAsyncCmd
from platformio.commands.remote.client.base import RemoteClientBase
from platformio.project.config import ProjectConfig
from platformio.project.exception import NotPlatformIOProjectError
from platformio.project.helpers import get_project_core_dir
class RemoteAgentService(RemoteClientBase):
def __init__(self, name, share, working_dir=None):
RemoteClientBase.__init__(self)
self.log_level = LogLevel.info
self.working_dir = working_dir or join(get_project_core_dir(), "remote")
if not isdir(self.working_dir):
os.makedirs(self.working_dir)
if name:
self.name = str(name)[:50]
self.join_options.update(
{"agent": True, "share": [s.lower().strip()[:50] for s in share]}
)
self._acs = {}
def agent_pool_ready(self):
pass
def cb_disconnected(self, reason):
for ac in self._acs.values():
ac.ac_close()
RemoteClientBase.cb_disconnected(self, reason)
def remote_acread(self, ac_id):
self.log.debug("Async Read: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return self._acs[ac_id].ac_read()
def remote_acwrite(self, ac_id, data):
self.log.debug("Async Write: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return self._acs[ac_id].ac_write(data)
def remote_acclose(self, ac_id):
self.log.debug("Async Close: {id}", id=ac_id)
if ac_id not in self._acs:
raise pb.Error("Invalid Async Identifier")
return_code = self._acs[ac_id].ac_close()
del self._acs[ac_id]
return return_code
def remote_cmd(self, cmd, options):
self.log.info("Remote command received: {cmd}", cmd=cmd)
self.log.debug("Command options: {options!r}", options=options)
callback = "_process_cmd_%s" % cmd.replace(".", "_")
return getattr(self, callback)(options)
def _defer_async_cmd(self, ac, pass_agent_name=True):
self._acs[ac.id] = ac
if pass_agent_name:
return (self.id, ac.id, self.name)
return (self.id, ac.id)
def _process_cmd_device_list(self, _):
return (self.name, util.get_serialports())
def _process_cmd_device_monitor(self, options):
if not options["port"]:
for item in util.get_serialports():
if "VID:PID" in item["hwid"]:
options["port"] = item["port"]
break
# terminate opened monitors
if options["port"]:
for ac in list(self._acs.values()):
if (
isinstance(ac, SerialPortAsyncCmd)
and ac.options["port"] == options["port"]
):
self.log.info(
"Terminate previously opened monitor at {port}",
port=options["port"],
)
ac.ac_close()
del self._acs[ac.id]
if not options["port"]:
raise pb.Error("Please specify serial port using `--port` option")
self.log.info("Starting serial monitor at {port}", port=options["port"])
return self._defer_async_cmd(SerialPortAsyncCmd(options), pass_agent_name=False)
def _process_cmd_psync(self, options):
for ac in list(self._acs.values()):
if (
isinstance(ac, ProjectSyncAsyncCmd)
and ac.options["id"] == options["id"]
):
self.log.info("Terminate previous Project Sync process")
ac.ac_close()
del self._acs[ac.id]
options["agent_working_dir"] = self.working_dir
return self._defer_async_cmd(
ProjectSyncAsyncCmd(options), pass_agent_name=False
)
def _process_cmd_run(self, options):
return self._process_cmd_run_or_test("run", options)
def _process_cmd_test(self, options):
return self._process_cmd_run_or_test("test", options)
def _process_cmd_run_or_test( # pylint: disable=too-many-locals,too-many-branches
self, command, options
):
assert options and "project_id" in options
project_dir = join(self.working_dir, "projects", options["project_id"])
origin_pio_ini = join(project_dir, "platformio.ini")
back_pio_ini = join(project_dir, "platformio.ini.bak")
# remove insecure project options
try:
conf = ProjectConfig(origin_pio_ini)
if isfile(back_pio_ini):
os.remove(back_pio_ini)
os.rename(origin_pio_ini, back_pio_ini)
# cleanup
if conf.has_section("platformio"):
for opt in conf.options("platformio"):
if opt.endswith("_dir"):
conf.remove_option("platformio", opt)
else:
conf.add_section("platformio")
conf.set("platformio", "build_dir", ".pio/build")
conf.save(origin_pio_ini)
# restore A/M times
os.utime(origin_pio_ini, (getatime(back_pio_ini), getmtime(back_pio_ini)))
except NotPlatformIOProjectError as e:
raise pb.Error(str(e))
cmd_args = ["platformio", "--force", command, "-d", project_dir]
for env in options.get("environment", []):
cmd_args.extend(["-e", env])
for target in options.get("target", []):
cmd_args.extend(["-t", target])
for ignore in options.get("ignore", []):
cmd_args.extend(["-i", ignore])
if options.get("upload_port", False):
cmd_args.extend(["--upload-port", options.get("upload_port")])
if options.get("test_port", False):
cmd_args.extend(["--test-port", options.get("test_port")])
if options.get("disable_auto_clean", False):
cmd_args.append("--disable-auto-clean")
if options.get("without_building", False):
cmd_args.append("--without-building")
if options.get("without_uploading", False):
cmd_args.append("--without-uploading")
if options.get("silent", False):
cmd_args.append("-s")
if options.get("verbose", False):
cmd_args.append("-v")
paused_acs = []
for ac in self._acs.values():
if not isinstance(ac, SerialPortAsyncCmd):
continue
self.log.info("Pause active monitor at {port}", port=ac.options["port"])
ac.pause()
paused_acs.append(ac)
def _cb_on_end():
if isfile(back_pio_ini):
if isfile(origin_pio_ini):
os.remove(origin_pio_ini)
os.rename(back_pio_ini, origin_pio_ini)
for ac in paused_acs:
ac.unpause()
self.log.info(
"Unpause active monitor at {port}", port=ac.options["port"]
)
return self._defer_async_cmd(
ProcessAsyncCmd(
{"executable": proc.where_is_program("platformio"), "args": cmd_args},
on_end_callback=_cb_on_end,
)
)
def _process_cmd_update(self, options):
cmd_args = ["platformio", "--force", "update"]
if options.get("only_check"):
cmd_args.append("--only-check")
return self._defer_async_cmd(
ProcessAsyncCmd(
{"executable": proc.where_is_program("platformio"), "args": cmd_args}
)
)


@@ -0,0 +1,65 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
from twisted.spread import pb # pylint: disable=import-error
from platformio.commands.remote.client.base import RemoteClientBase
class AsyncClientBase(RemoteClientBase):
def __init__(self, command, agents, options):
RemoteClientBase.__init__(self)
self.command = command
self.agents = agents
self.options = options
self._acs_total = 0
self._acs_ended = 0
def agent_pool_ready(self):
pass
def cb_async_result(self, result):
if self._acs_total == 0:
self._acs_total = len(result)
for (success, value) in result:
if not success:
raise pb.Error(value)
self.acread_data(*value)
def acread_data(self, agent_id, ac_id, agent_name=None):
d = self.agentpool.callRemote("acread", agent_id, ac_id)
d.addCallback(self.cb_acread_result, agent_id, ac_id, agent_name)
d.addErrback(self.cb_global_error)
def cb_acread_result(self, result, agent_id, ac_id, agent_name):
if result is None:
self.acclose(agent_id, ac_id)
else:
if self._acs_total > 1 and agent_name:
click.echo("[%s] " % agent_name, nl=False)
click.echo(result, nl=False)
self.acread_data(agent_id, ac_id, agent_name)
def acclose(self, agent_id, ac_id):
d = self.agentpool.callRemote("acclose", agent_id, ac_id)
d.addCallback(self.cb_acclose_result)
d.addErrback(self.cb_global_error)
def cb_acclose_result(self, exit_code):
self._acs_ended += 1
if self._acs_ended != self._acs_total:
return
self.disconnect(exit_code)


@@ -0,0 +1,193 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from time import time
import click
from twisted.internet import defer, endpoints, reactor # pylint: disable=import-error
from twisted.logger import ILogObserver # pylint: disable=import-error
from twisted.logger import Logger # pylint: disable=import-error
from twisted.logger import LogLevel # pylint: disable=import-error
from twisted.logger import formatEvent # pylint: disable=import-error
from twisted.python import failure # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from zope.interface import provider # pylint: disable=import-error
from platformio import __pioremote_endpoint__, __version__, app, exception, maintenance
from platformio.commands.remote.factory.client import RemoteClientFactory
from platformio.commands.remote.factory.ssl import SSLContextFactory
class RemoteClientBase( # pylint: disable=too-many-instance-attributes
pb.Referenceable
):
PING_DELAY = 60
PING_MAX_FAILURES = 3
DEBUG = False
def __init__(self):
self.log_level = LogLevel.warn
self.log = Logger(namespace="remote", observer=self._log_observer)
self.id = app.get_host_id()
self.name = app.get_host_name()
self.join_options = {"corever": __version__}
self.perspective = None
self.agentpool = None
self._ping_id = 0
self._ping_caller = None
self._ping_counter = 0
self._reactor_stopped = False
self._exit_code = 0
@provider(ILogObserver)
def _log_observer(self, event):
if not self.DEBUG and (
event["log_namespace"] != self.log.namespace
or self.log_level > event["log_level"]
):
return
msg = formatEvent(event)
click.echo(
"%s [%s] %s"
% (
datetime.fromtimestamp(event["log_time"]).strftime("%Y-%m-%d %H:%M:%S"),
event["log_level"].name,
msg,
)
)
def connect(self):
self.log.info("Name: {name}", name=self.name)
self.log.info("Connecting to PlatformIO Remote Development Cloud")
# pylint: disable=protected-access
proto, options = endpoints._parse(__pioremote_endpoint__)
proto = proto[0]
factory = RemoteClientFactory()
factory.remote_client = self
factory.sslContextFactory = None
if proto == "ssl":
factory.sslContextFactory = SSLContextFactory(options["host"])
reactor.connectSSL(
options["host"],
int(options["port"]),
factory,
factory.sslContextFactory,
)
elif proto == "tcp":
reactor.connectTCP(options["host"], int(options["port"]), factory)
else:
raise exception.PlatformioException("Unknown PIO Remote Cloud protocol")
reactor.run()
if self._exit_code != 0:
raise exception.ReturnErrorCode(self._exit_code)
def cb_client_authorization_failed(self, err):
msg = "Bad account credentials"
if err.check(pb.Error):
msg = err.getErrorMessage()
self.log.error(msg)
self.disconnect(exit_code=1)
def cb_client_authorization_made(self, perspective):
self.log.info("Successfully authorized")
self.perspective = perspective
d = perspective.callRemote("join", self.id, self.name, self.join_options)
d.addCallback(self._cb_client_join_made)
d.addErrback(self.cb_global_error)
def _cb_client_join_made(self, result):
code = result[0]
if code == 1:
self.agentpool = result[1]
self.agent_pool_ready()
self.restart_ping()
elif code == 2:
self.remote_service(*result[1:])
def remote_service(self, command, options):
if command == "disconnect":
self.log.error(
"PIO Remote Cloud disconnected: {msg}", msg=options.get("message")
)
self.disconnect()
def restart_ping(self, reset_counter=True):
# stop previous ping callers
self.stop_ping(reset_counter)
self._ping_caller = reactor.callLater(self.PING_DELAY, self._do_ping)
def _do_ping(self):
self._ping_counter += 1
self._ping_id = int(time())
d = self.perspective.callRemote("service", "ping", {"id": self._ping_id})
d.addCallback(self._cb_pong)
d.addErrback(self._cb_pong)
def stop_ping(self, reset_counter=True):
if reset_counter:
self._ping_counter = 0
if not self._ping_caller or not self._ping_caller.active():
return
self._ping_caller.cancel()
self._ping_caller = None
def _cb_pong(self, result):
if not isinstance(result, failure.Failure) and self._ping_id == result:
self.restart_ping()
return
if self._ping_counter >= self.PING_MAX_FAILURES:
self.stop_ping()
self.perspective.broker.transport.loseConnection()
else:
self.restart_ping(reset_counter=False)
def agent_pool_ready(self):
raise NotImplementedError
def disconnect(self, exit_code=None):
self.stop_ping()
if exit_code is not None:
self._exit_code = exit_code
if reactor.running and not self._reactor_stopped:
self._reactor_stopped = True
reactor.stop()
def cb_disconnected(self, _):
self.stop_ping()
self.perspective = None
self.agentpool = None
def cb_global_error(self, err):
if err.check(pb.PBConnectionLost, defer.CancelledError):
return
msg = err.getErrorMessage()
if err.check(pb.DeadReferenceError):
msg = "Remote Client has been terminated"
elif "PioAgentNotStartedError" in str(err.type):
msg = (
"Could not find active agents. Please start it before on "
"a remote machine using `pio remote agent start` command.\n"
"See http://docs.platformio.org/page/plus/pio-remote.html"
)
else:
maintenance.on_platformio_exception(Exception(err.type))
click.secho(msg, fg="red", err=True)
self.disconnect(exit_code=1)


@@ -0,0 +1,54 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import click
from platformio.commands.remote.client.base import RemoteClientBase
class DeviceListClient(RemoteClientBase):
def __init__(self, agents, json_output):
RemoteClientBase.__init__(self)
self.agents = agents
self.json_output = json_output
def agent_pool_ready(self):
d = self.agentpool.callRemote("cmd", self.agents, "device.list")
d.addCallback(self._cbResult)
d.addErrback(self.cb_global_error)
def _cbResult(self, result):
data = {}
for (success, value) in result:
if not success:
click.secho(value, fg="red", err=True)
continue
(agent_name, devlist) = value
data[agent_name] = devlist
if self.json_output:
click.echo(json.dumps(data))
else:
for agent_name, devlist in data.items():
click.echo("Agent %s" % click.style(agent_name, fg="cyan", bold=True))
click.echo("=" * (6 + len(agent_name)))
for item in devlist:
click.secho(item["port"], fg="cyan")
click.echo("-" * len(item["port"]))
click.echo("Hardware ID: %s" % item["hwid"])
click.echo("Description: %s" % item["description"])
click.echo("")
self.disconnect()


@@ -0,0 +1,236 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from fnmatch import fnmatch
import click
from twisted.internet import protocol, reactor, task # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from platformio.commands.remote.client.base import RemoteClientBase
class SMBridgeProtocol(protocol.Protocol): # pylint: disable=no-init
def connectionMade(self):
self.factory.add_client(self)
def connectionLost(self, reason): # pylint: disable=unused-argument
self.factory.remove_client(self)
def dataReceived(self, data):
self.factory.send_to_server(data)
class SMBridgeFactory(protocol.ServerFactory):
def __init__(self, cdm):
self.cdm = cdm
self._clients = []
def buildProtocol(self, addr): # pylint: disable=unused-argument
p = SMBridgeProtocol()
p.factory = self # pylint: disable=attribute-defined-outside-init
return p
def add_client(self, client):
self.cdm.log.debug("SMBridge: Client connected")
self._clients.append(client)
self.cdm.acread_data()
def remove_client(self, client):
self.cdm.log.debug("SMBridge: Client disconnected")
self._clients.remove(client)
if not self._clients:
self.cdm.client_terminal_stopped()
def has_clients(self):
return len(self._clients)
def send_to_clients(self, data):
if not self._clients:
return None
for client in self._clients:
client.transport.write(data)
return len(data)
def send_to_server(self, data):
self.cdm.acwrite_data(data)
class DeviceMonitorClient( # pylint: disable=too-many-instance-attributes
RemoteClientBase
):
MAX_BUFFER_SIZE = 1024 * 1024
def __init__(self, agents, **kwargs):
RemoteClientBase.__init__(self)
self.agents = agents
self.cmd_options = kwargs
self._bridge_factory = SMBridgeFactory(self)
self._agent_id = None
self._ac_id = None
self._d_acread = None
self._d_acwrite = None
self._acwrite_buffer = ""
def agent_pool_ready(self):
d = task.deferLater(
reactor, 1, self.agentpool.callRemote, "cmd", self.agents, "device.list"
)
d.addCallback(self._cb_device_list)
d.addErrback(self.cb_global_error)
def _cb_device_list(self, result):
devices = []
hwid_devindexes = []
for (success, value) in result:
if not success:
click.secho(value, fg="red", err=True)
continue
(agent_name, ports) = value
for item in ports:
if "VID:PID" in item["hwid"]:
hwid_devindexes.append(len(devices))
devices.append((agent_name, item))
if len(result) == 1 and self.cmd_options["port"]:
if set(["*", "?", "[", "]"]) & set(self.cmd_options["port"]):
for agent, item in devices:
if fnmatch(item["port"], self.cmd_options["port"]):
return self.start_remote_monitor(agent, item["port"])
return self.start_remote_monitor(result[0][1][0], self.cmd_options["port"])
device = None
if len(hwid_devindexes) == 1:
device = devices[hwid_devindexes[0]]
else:
click.echo("Available ports:")
for i, device in enumerate(devices):
click.echo(
"{index}. {host}{port} \t{description}".format(
index=i + 1,
host=device[0] + ":" if len(result) > 1 else "",
port=device[1]["port"],
description=device[1]["description"]
if device[1]["description"] != "n/a"
else "",
)
)
device_index = click.prompt(
"Please choose a port (number in the list above)",
type=click.Choice([str(i + 1) for i, _ in enumerate(devices)]),
)
device = devices[int(device_index) - 1]
self.start_remote_monitor(device[0], device[1]["port"])
return None
def start_remote_monitor(self, agent, port):
options = {"port": port}
for key in ("baud", "parity", "rtscts", "xonxoff", "rts", "dtr"):
options[key] = self.cmd_options[key]
click.echo(
"Starting Serial Monitor on {host}:{port}".format(
host=agent, port=options["port"]
)
)
d = self.agentpool.callRemote("cmd", [agent], "device.monitor", options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)
def cb_async_result(self, result):
if len(result) != 1:
raise pb.Error("Invalid response from Remote Cloud")
success, value = result[0]
if not success:
raise pb.Error(value)
reconnected = self._agent_id is not None
self._agent_id, self._ac_id = value
if reconnected:
self.acread_data(force=True)
self.acwrite_data("", force=True)
return
# start bridge
port = reactor.listenTCP(0, self._bridge_factory)
address = port.getHost()
self.log.debug("Serial Bridge is started on {address!r}", address=address)
if "sock" in self.cmd_options:
with open(os.path.join(self.cmd_options["sock"], "sock"), "w") as fp:
fp.write("socket://localhost:%d" % address.port)
def client_terminal_stopped(self):
try:
d = self.agentpool.callRemote("acclose", self._agent_id, self._ac_id)
d.addCallback(lambda r: self.disconnect())
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def acread_data(self, force=False):
if force and self._d_acread:
self._d_acread.cancel()
self._d_acread = None
if (
self._d_acread and not self._d_acread.called
) or not self._bridge_factory.has_clients():
return
try:
self._d_acread = self.agentpool.callRemote(
"acread", self._agent_id, self._ac_id
)
self._d_acread.addCallback(self.cb_acread_result)
self._d_acread.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_acread_result(self, result):
if result is None:
self.disconnect(exit_code=1)
else:
self._bridge_factory.send_to_clients(result)
self.acread_data()
def acwrite_data(self, data, force=False):
if force and self._d_acwrite:
self._d_acwrite.cancel()
self._d_acwrite = None
self._acwrite_buffer += data
if len(self._acwrite_buffer) > self.MAX_BUFFER_SIZE:
self._acwrite_buffer = self._acwrite_buffer[-1 * self.MAX_BUFFER_SIZE :]
if (self._d_acwrite and not self._d_acwrite.called) or not self._acwrite_buffer:
return
data = self._acwrite_buffer
self._acwrite_buffer = ""
try:
d = self.agentpool.callRemote("acwrite", self._agent_id, self._ac_id, data)
d.addCallback(self.cb_acwrite_result)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_acwrite_result(self, result):
assert result > 0
if self._acwrite_buffer:
self.acwrite_data("")


@@ -0,0 +1,272 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
import os
import zlib
from io import BytesIO
from twisted.spread import pb # pylint: disable=import-error
from platformio import fs
from platformio.commands.remote.client.async_base import AsyncClientBase
from platformio.commands.remote.projectsync import PROJECT_SYNC_STAGE, ProjectSync
from platformio.compat import hashlib_encode_data
from platformio.project.config import ProjectConfig
class RunOrTestClient(AsyncClientBase):
MAX_ARCHIVE_SIZE = 50 * 1024 * 1024 # 50Mb
UPLOAD_CHUNK_SIZE = 256 * 1024 # 256Kb
PSYNC_SRC_EXTS = [
"c",
"cpp",
"S",
"spp",
"SPP",
"sx",
"s",
"asm",
"ASM",
"h",
"hpp",
"ipp",
"ino",
"pde",
"json",
"properties",
]
PSYNC_SKIP_DIRS = (".git", ".svn", ".hg", "example", "examples", "test", "tests")
def __init__(self, *args, **kwargs):
AsyncClientBase.__init__(self, *args, **kwargs)
self.project_id = self.generate_project_id(self.options["project_dir"])
self.psync = ProjectSync(self.options["project_dir"])
def generate_project_id(self, path):
h = hashlib.sha1(hashlib_encode_data(self.id))
h.update(hashlib_encode_data(path))
return "%s-%s" % (os.path.basename(path), h.hexdigest())
def add_project_items(self, psync):
with fs.cd(self.options["project_dir"]):
cfg = ProjectConfig.get_instance(
os.path.join(self.options["project_dir"], "platformio.ini")
)
psync.add_item(cfg.path, "platformio.ini")
psync.add_item(cfg.get_optional_dir("shared"), "shared")
psync.add_item(cfg.get_optional_dir("boards"), "boards")
if self.options["force_remote"]:
self._add_project_source_items(cfg, psync)
else:
self._add_project_binary_items(cfg, psync)
if self.command == "test":
psync.add_item(cfg.get_optional_dir("test"), "test")
def _add_project_source_items(self, cfg, psync):
psync.add_item(cfg.get_optional_dir("lib"), "lib")
psync.add_item(
cfg.get_optional_dir("include"),
"include",
cb_filter=self._cb_tarfile_filter,
)
psync.add_item(
cfg.get_optional_dir("src"), "src", cb_filter=self._cb_tarfile_filter
)
if set(["buildfs", "uploadfs", "uploadfsota"]) & set(
self.options.get("target", [])
):
psync.add_item(cfg.get_optional_dir("data"), "data")
@staticmethod
def _add_project_binary_items(cfg, psync):
build_dir = cfg.get_optional_dir("build")
for env_name in os.listdir(build_dir):
env_dir = os.path.join(build_dir, env_name)
if not os.path.isdir(env_dir):
continue
for fname in os.listdir(env_dir):
bin_file = os.path.join(env_dir, fname)
bin_exts = (".elf", ".bin", ".hex", ".eep", "program")
if os.path.isfile(bin_file) and fname.endswith(bin_exts):
psync.add_item(
bin_file, os.path.join(".pio", "build", env_name, fname)
)
def _cb_tarfile_filter(self, path):
if (
os.path.isdir(path)
and os.path.basename(path).lower() in self.PSYNC_SKIP_DIRS
):
return None
if os.path.isfile(path) and not self.is_file_with_exts(
path, self.PSYNC_SRC_EXTS
):
return None
return path
@staticmethod
def is_file_with_exts(path, exts):
if path.endswith(tuple(".%s" % e for e in exts)):
return True
return False
def agent_pool_ready(self):
self.psync_init()
def psync_init(self):
self.add_project_items(self.psync)
d = self.agentpool.callRemote(
"cmd",
self.agents,
"psync",
dict(id=self.project_id, items=[i[1] for i in self.psync.get_items()]),
)
d.addCallback(self.cb_psync_init_result)
d.addErrback(self.cb_global_error)
# build db index while wait for result from agent
self.psync.rebuild_dbindex()
def cb_psync_init_result(self, result):
self._acs_total = len(result)
for (success, value) in result:
if not success:
raise pb.Error(value)
agent_id, ac_id = value
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(stage=PROJECT_SYNC_STAGE.DBINDEX.value),
)
d.addCallback(self.cb_psync_dbindex_result, agent_id, ac_id)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_dbindex_result(self, result, agent_id, ac_id):
result = set(json.loads(zlib.decompress(result)))
dbindex = set(self.psync.get_dbindex())
delete = list(result - dbindex)
delta = list(dbindex - result)
self.log.debug(
"PSync: stats, total={total}, delete={delete}, delta={delta}",
total=len(dbindex),
delete=len(delete),
delta=len(delta),
)
if not delete and not delta:
return self.psync_finalize(agent_id, ac_id)
if not delete:
return self.psync_upload(agent_id, ac_id, delta)
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(
stage=PROJECT_SYNC_STAGE.DELETE.value,
dbindex=zlib.compress(json.dumps(delete).encode()),
),
)
d.addCallback(self.cb_psync_delete_result, agent_id, ac_id, delta)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
return None
def cb_psync_delete_result(self, result, agent_id, ac_id, dbindex):
assert result
self.psync_upload(agent_id, ac_id, dbindex)
def psync_upload(self, agent_id, ac_id, dbindex):
assert dbindex
fileobj = BytesIO()
compressed = self.psync.compress_items(fileobj, dbindex, self.MAX_ARCHIVE_SIZE)
fileobj.seek(0)
self.log.debug(
"PSync: upload project, size={size}", size=len(fileobj.getvalue())
)
self.psync_upload_chunk(
agent_id, ac_id, list(set(dbindex) - set(compressed)), fileobj
)
def psync_upload_chunk(self, agent_id, ac_id, dbindex, fileobj):
offset = fileobj.tell()
total = fileobj.seek(0, os.SEEK_END)
# unwind
fileobj.seek(offset)
chunk = fileobj.read(self.UPLOAD_CHUNK_SIZE)
assert chunk
try:
d = self.agentpool.callRemote(
"acwrite",
agent_id,
ac_id,
dict(
stage=PROJECT_SYNC_STAGE.UPLOAD.value,
chunk=chunk,
length=len(chunk),
total=total,
),
)
d.addCallback(
self.cb_psync_upload_chunk_result, agent_id, ac_id, dbindex, fileobj
)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_upload_chunk_result( # pylint: disable=too-many-arguments
self, result, agent_id, ac_id, dbindex, fileobj
):
result = PROJECT_SYNC_STAGE.lookupByValue(result)
self.log.debug("PSync: upload chunk result {r}", r=str(result))
assert result & (PROJECT_SYNC_STAGE.UPLOAD | PROJECT_SYNC_STAGE.EXTRACTED)
if result is PROJECT_SYNC_STAGE.EXTRACTED:
if dbindex:
self.psync_upload(agent_id, ac_id, dbindex)
else:
self.psync_finalize(agent_id, ac_id)
else:
self.psync_upload_chunk(agent_id, ac_id, dbindex, fileobj)
def psync_finalize(self, agent_id, ac_id):
try:
d = self.agentpool.callRemote("acclose", agent_id, ac_id)
d.addCallback(self.cb_psync_completed_result, agent_id)
d.addErrback(self.cb_global_error)
except (AttributeError, pb.DeadReferenceError):
self.disconnect(exit_code=1)
def cb_psync_completed_result(self, result, agent_id):
assert PROJECT_SYNC_STAGE.lookupByValue(result)
options = self.options.copy()
del options["project_dir"]
options["project_id"] = self.project_id
d = self.agentpool.callRemote("cmd", [agent_id], self.command, options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)
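A sketch of the identifier produced by generate_project_id() near the top of this class: the project directory basename plus a SHA1 over the host id and the project path. The host id and path below are made-up stand-ins:

import hashlib
import os

host_id = "0123456789abcdef"                 # hypothetical app.get_host_id() value
path = "/home/user/projects/blink"
h = hashlib.sha1(host_id.encode())
h.update(path.encode())
print("%s-%s" % (os.path.basename(path), h.hexdigest()))
# -> "blink-<40 hex chars>", stable per host and per project location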


@@ -0,0 +1,22 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.remote.client.async_base import AsyncClientBase
class UpdateCoreClient(AsyncClientBase):
def agent_pool_ready(self):
d = self.agentpool.callRemote("cmd", self.agents, self.command, self.options)
d.addCallback(self.cb_async_result)
d.addErrback(self.cb_global_error)


@@ -12,30 +12,37 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-arguments, import-outside-toplevel
# pylint: disable=inconsistent-return-statements
import os
import sys
import subprocess
import threading
from tempfile import mkdtemp
from time import sleep
import click
from platformio import exception, fs
from platformio import fs, proc
from platformio.commands.device import helpers as device_helpers
from platformio.commands.device.command import device_monitor as cmd_device_monitor
from platformio.managers.core import pioplus_call
from platformio.commands.run.command import cli as cmd_run
from platformio.commands.test.command import cli as cmd_test
from platformio.compat import ensure_python3
from platformio.package.manager.core import inject_contrib_pysite
from platformio.project.exception import NotPlatformIOProjectError
# pylint: disable=unused-argument
@click.group("remote", short_help="PIO Remote")
@click.group("remote", short_help="Remote development")
@click.option("-a", "--agent", multiple=True)
def cli(**kwargs):
pass
@click.pass_context
def cli(ctx, agent):
assert ensure_python3()
ctx.obj = agent
inject_contrib_pysite(verify_openssl=True)
@cli.group("agent", short_help="Start new agent or list active")
@cli.group("agent", short_help="Start a new agent or list active")
def remote_agent():
pass
@@ -49,18 +56,17 @@ def remote_agent():
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
)
def remote_agent_start(**kwargs):
pioplus_call(sys.argv[1:])
def remote_agent_start(name, share, working_dir):
from platformio.commands.remote.client.agent_service import RemoteAgentService
@remote_agent.command("reload", short_help="Reload agents")
def remote_agent_reload():
pioplus_call(sys.argv[1:])
RemoteAgentService(name, share, working_dir).connect()
@remote_agent.command("list", short_help="List active agents")
def remote_agent_list():
pioplus_call(sys.argv[1:])
from platformio.commands.remote.client.agent_list import AgentListClient
AgentListClient().connect()
@cli.command("update", short_help="Update installed Platforms, Packages and Libraries")
@@ -73,8 +79,11 @@ def remote_agent_list():
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
def remote_update(only_check, dry_run):
pioplus_call(sys.argv[1:])
@click.pass_obj
def remote_update(agents, only_check, dry_run):
from platformio.commands.remote.client.update_core import UpdateCoreClient
UpdateCoreClient("update", agents, dict(only_check=only_check or dry_run)).connect()
@cli.command("run", short_help="Process project environments remotely")
@@ -93,8 +102,65 @@ def remote_update(only_check, dry_run):
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
def remote_run(**kwargs):
pioplus_call(sys.argv[1:])
@click.pass_obj
@click.pass_context
def remote_run(
ctx,
agents,
environment,
target,
upload_port,
project_dir,
disable_auto_clean,
force_remote,
silent,
verbose,
):
from platformio.commands.remote.client.run_or_test import RunOrTestClient
cr = RunOrTestClient(
"run",
agents,
dict(
environment=environment,
target=target,
upload_port=upload_port,
project_dir=project_dir,
disable_auto_clean=disable_auto_clean,
force_remote=force_remote,
silent=silent,
verbose=verbose,
),
)
if force_remote:
return cr.connect()
click.secho("Building project locally", bold=True)
local_targets = []
if "clean" in target:
local_targets = ["clean"]
elif set(["buildfs", "uploadfs", "uploadfsota"]) & set(target):
local_targets = ["buildfs"]
else:
local_targets = ["checkprogsize", "buildprog"]
ctx.invoke(
cmd_run,
environment=environment,
target=local_targets,
project_dir=project_dir,
# disable_auto_clean=True,
silent=silent,
verbose=verbose,
)
if any(["upload" in t for t in target] + ["program" in target]):
click.secho("Uploading firmware remotely", bold=True)
cr.options["target"] += ("nobuild",)
cr.options["disable_auto_clean"] = True
cr.connect()
return True
@cli.command("test", short_help="Remote Unit Testing")
@@ -114,8 +180,59 @@ def remote_run(**kwargs):
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
def remote_test(**kwargs):
pioplus_call(sys.argv[1:])
@click.pass_obj
@click.pass_context
def remote_test(
ctx,
agents,
environment,
ignore,
upload_port,
test_port,
project_dir,
force_remote,
without_building,
without_uploading,
verbose,
):
from platformio.commands.remote.client.run_or_test import RunOrTestClient
cr = RunOrTestClient(
"test",
agents,
dict(
environment=environment,
ignore=ignore,
upload_port=upload_port,
test_port=test_port,
project_dir=project_dir,
force_remote=force_remote,
without_building=without_building,
without_uploading=without_uploading,
verbose=verbose,
),
)
if force_remote:
return cr.connect()
click.secho("Building project locally", bold=True)
ctx.invoke(
cmd_test,
environment=environment,
ignore=ignore,
project_dir=project_dir,
without_uploading=True,
without_testing=True,
verbose=verbose,
)
click.secho("Testing project remotely", bold=True)
cr.options["without_building"] = True
cr.connect()
return True
@cli.group("device", short_help="Monitor remote device or list existing")
@@ -125,8 +242,11 @@ def remote_device():
@remote_device.command("list", short_help="List remote devices")
@click.option("--json-output", is_flag=True)
def device_list(json_output):
pioplus_call(sys.argv[1:])
@click.pass_obj
def device_list(agents, json_output):
from platformio.commands.remote.client.device_list import DeviceListClient
DeviceListClient(agents, json_output).connect()
@remote_device.command("monitor", short_help="Monitor remote device")
@@ -193,8 +313,20 @@ def device_list(json_output):
"--environment",
help="Load configuration from `platformio.ini` and specified environment",
)
@click.option(
"--sock",
type=click.Path(
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
),
)
@click.pass_obj
@click.pass_context
def device_monitor(ctx, **kwargs):
def device_monitor(ctx, agents, **kwargs):
from platformio.commands.remote.client.device_monitor import DeviceMonitorClient
if kwargs["sock"]:
return DeviceMonitorClient(agents, **kwargs).connect()
project_options = {}
try:
with fs.cd(kwargs["project_dir"]):
@@ -206,15 +338,12 @@ def device_monitor(ctx, **kwargs):
kwargs["baud"] = kwargs["baud"] or 9600
def _tx_target(sock_dir):
pioplus_argv = ["remote", "device", "monitor"]
pioplus_argv.extend(device_helpers.options_to_argv(kwargs, project_options))
pioplus_argv.extend(["--sock", sock_dir])
try:
pioplus_call(pioplus_argv)
except exception.ReturnErrorCode:
pass
subcmd_argv = ["remote", "device", "monitor"]
subcmd_argv.extend(device_helpers.options_to_argv(kwargs, project_options))
subcmd_argv.extend(["--sock", sock_dir])
subprocess.call([proc.where_is_program("platformio")] + subcmd_argv)
sock_dir = mkdtemp(suffix="pioplus")
sock_dir = mkdtemp(suffix="pio")
sock_file = os.path.join(sock_dir, "sock")
try:
t = threading.Thread(target=_tx_target, args=(sock_dir,))
@@ -229,3 +358,5 @@ def device_monitor(ctx, **kwargs):
t.join(2)
finally:
fs.rmtree(sock_dir)
return True


@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@@ -0,0 +1,86 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.cred import credentials # pylint: disable=import-error
from twisted.internet import defer, protocol, reactor # pylint: disable=import-error
from twisted.spread import pb # pylint: disable=import-error
from platformio.app import get_host_id
from platformio.clients.account import AccountClient
class RemoteClientFactory(pb.PBClientFactory, protocol.ReconnectingClientFactory):
def clientConnectionMade(self, broker):
if self.sslContextFactory and not self.sslContextFactory.certificate_verified:
self.remote_client.log.error(
"A remote cloud could not prove that its security certificate is "
"from {host}. This may cause a misconfiguration or an attacker "
"intercepting your connection.",
host=self.sslContextFactory.host,
)
return self.remote_client.disconnect()
pb.PBClientFactory.clientConnectionMade(self, broker)
protocol.ReconnectingClientFactory.resetDelay(self)
self.remote_client.log.info("Successfully connected")
self.remote_client.log.info("Authenticating")
auth_token = None
try:
auth_token = AccountClient().fetch_authentication_token()
except Exception as e: # pylint:disable=broad-except
d = defer.Deferred()
d.addErrback(self.clientAuthorizationFailed)
d.errback(pb.Error(e))
return d
d = self.login(
credentials.UsernamePassword(
auth_token.encode(),
get_host_id().encode(),
),
client=self.remote_client,
)
d.addCallback(self.remote_client.cb_client_authorization_made)
d.addErrback(self.clientAuthorizationFailed)
return d
def clientAuthorizationFailed(self, err):
AccountClient.delete_local_session()
self.remote_client.cb_client_authorization_failed(err)
def clientConnectionFailed(self, connector, reason):
self.remote_client.log.warn(
"Could not connect to PIO Remote Cloud. Reconnecting..."
)
self.remote_client.cb_disconnected(reason)
protocol.ReconnectingClientFactory.clientConnectionFailed(
self, connector, reason
)
def clientConnectionLost( # pylint: disable=arguments-differ
self, connector, unused_reason
):
if not reactor.running:
self.remote_client.log.info("Successfully disconnected")
return
self.remote_client.log.warn(
"Connection is lost to PIO Remote Cloud. Reconnecting"
)
pb.PBClientFactory.clientConnectionLost(
self, connector, unused_reason, reconnecting=1
)
self.remote_client.cb_disconnected(unused_reason)
protocol.ReconnectingClientFactory.clientConnectionLost(
self, connector, unused_reason
)


@ -0,0 +1,41 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import certifi
from OpenSSL import SSL # pylint: disable=import-error
from twisted.internet import ssl # pylint: disable=import-error
class SSLContextFactory(ssl.ClientContextFactory):
def __init__(self, host):
self.host = host
self.certificate_verified = False
def getContext(self):
ctx = super(SSLContextFactory, self).getContext()
ctx.set_verify(
SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, self.verifyHostname
)
ctx.load_verify_locations(certifi.where())
return ctx
def verifyHostname( # pylint: disable=unused-argument,too-many-arguments
self, connection, x509, errno, depth, status
):
cn = x509.get_subject().commonName
if cn.startswith("*"):
cn = cn[1:]
if self.host.endswith(cn):
self.certificate_verified = True
return status
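For context, here is a minimal sketch of how these two pieces are typically wired into a Twisted client. The endpoint, port, and `remote_client` object below are placeholders, not values taken from this changeset:

from twisted.internet import reactor

host = "remote.example.org"                # placeholder endpoint
ssl_ctx = SSLContextFactory(host)          # verifies the peer chain against certifi's CA bundle
factory = RemoteClientFactory()
factory.remote_client = remote_client      # hypothetical client exposing log and cb_* callbacks
factory.sslContextFactory = ssl_ctx
reactor.connectSSL(host, 4413, factory, ssl_ctx)  # port number is illustrative
reactor.run()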


@ -0,0 +1,117 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tarfile
from binascii import crc32
from os.path import getmtime, getsize, isdir, isfile, join
from twisted.python import constants # pylint: disable=import-error
from platformio.compat import hashlib_encode_data
class PROJECT_SYNC_STAGE(constants.Flags):
INIT = constants.FlagConstant()
DBINDEX = constants.FlagConstant()
DELETE = constants.FlagConstant()
UPLOAD = constants.FlagConstant()
EXTRACTED = constants.FlagConstant()
COMPLETED = constants.FlagConstant()
class ProjectSync(object):
def __init__(self, path):
self.path = path
if not isdir(self.path):
os.makedirs(self.path)
self.items = []
self._db = {}
def add_item(self, path, relpath, cb_filter=None):
self.items.append((path, relpath, cb_filter))
def get_items(self):
return self.items
def rebuild_dbindex(self):
self._db = {}
for (path, relpath, cb_filter) in self.items:
if cb_filter and not cb_filter(path):
continue
self._insert_to_db(path, relpath)
if not isdir(path):
continue
for (root, _, files) in os.walk(path, followlinks=True):
for name in files:
self._insert_to_db(
join(root, name), join(relpath, root[len(path) + 1 :], name)
)
def _insert_to_db(self, path, relpath):
if not isfile(path):
return
index_hash = "%s-%s-%s" % (relpath, getmtime(path), getsize(path))
index = crc32(hashlib_encode_data(index_hash))
self._db[index] = (path, relpath)
def get_dbindex(self):
return list(self._db.keys())
def delete_dbindex(self, dbindex):
for index in dbindex:
if index not in self._db:
continue
path = self._db[index][0]
if isfile(path):
os.remove(path)
del self._db[index]
self.delete_empty_folders()
return True
def delete_empty_folders(self):
deleted = False
for item in self.items:
if not isdir(item[0]):
continue
for root, dirs, files in os.walk(item[0]):
if not dirs and not files and root != item[0]:
deleted = True
os.rmdir(root)
if deleted:
return self.delete_empty_folders()
return True
def compress_items(self, fileobj, dbindex, max_size):
compressed = []
total_size = 0
tar_opts = dict(fileobj=fileobj, mode="w:gz", bufsize=0, dereference=True)
with tarfile.open(**tar_opts) as tgz:
for index in dbindex:
compressed.append(index)
if index not in self._db:
continue
path, relpath = self._db[index]
tgz.add(path, relpath)
total_size += getsize(path)
if total_size > max_size:
break
return compressed
def decompress_items(self, fileobj):
fileobj.seek(0)
with tarfile.open(fileobj=fileobj, mode="r:gz") as tgz:
tgz.extractall(self.path)
return True
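A short usage sketch of the synchronization helper above; the paths and size limit are illustrative:

import io

sync = ProjectSync("/tmp/remote-project")                 # target directory, created if missing
sync.add_item("/home/user/blink/src", "src")              # directories are walked recursively
sync.add_item("/home/user/blink/platformio.ini", "platformio.ini")
sync.rebuild_dbindex()                                    # CRC32 index over (relpath, mtime, size)

buf = io.BytesIO()
packed = sync.compress_items(buf, sync.get_dbindex(), max_size=512 * 1024)
# `buf` now holds a gzipped tar with roughly up to 512 KiB of files; `packed` lists the packed indexes.
# The receiving side unpacks with the same class:
#   ProjectSync("/path/on/agent").decompress_items(buf)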


@ -12,9 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import operator
import os
from multiprocessing import cpu_count
from os import getcwd
from os.path import isfile
from time import time
import click
@ -26,7 +26,7 @@ from platformio.commands.run.helpers import clean_build_dir, handle_legacy_libde
from platformio.commands.run.processor import EnvironmentProcessor
from platformio.commands.test.processor import CTX_META_TEST_IS_RUNNING
from platformio.project.config import ProjectConfig
from platformio.project.helpers import find_project_dir_above
from platformio.project.helpers import find_project_dir_above, load_project_ide_data
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
@ -36,14 +36,14 @@ except NotImplementedError:
DEFAULT_JOB_NUMS = 1
@click.command("run", short_help="Process project environments")
@click.command("run", short_help="Run project targets (build, upload, clean, etc.)")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
"-d",
"--project-dir",
default=getcwd,
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
@ -68,6 +68,7 @@ except NotImplementedError:
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.option("--disable-auto-clean", is_flag=True)
@click.option("--list-targets", is_flag=True)
@click.pass_context
def cli(
ctx,
@ -80,11 +81,12 @@ def cli(
silent,
verbose,
disable_auto_clean,
list_targets,
):
app.set_session_var("custom_project_conf", project_conf)
# find project directory on upper level
if isfile(project_dir):
if os.path.isfile(project_dir):
project_dir = find_project_dir_above(project_dir)
is_test_running = CTX_META_TEST_IS_RUNNING in ctx.meta
@ -93,6 +95,9 @@ def cli(
config = ProjectConfig.get_instance(project_conf)
config.validate(environment)
if list_targets:
return print_target_list(list(environment) or config.envs())
# clean obsolete build dir
if not disable_auto_clean:
build_dir = config.get_optional_dir("build")
@ -142,7 +147,7 @@ def cli(
command_failed = any(r.get("succeeded") is False for r in results)
if not is_test_running and (command_failed or not silent) and len(results) > 1:
print_processing_summary(results)
print_processing_summary(results, verbose)
if command_failed:
raise exception.ReturnErrorCode(1)
@ -215,7 +220,7 @@ def print_processing_footer(result):
)
def print_processing_summary(results):
def print_processing_summary(results, verbose=False):
tabular_data = []
succeeded_nums = 0
failed_nums = 0
@ -227,6 +232,8 @@ def print_processing_summary(results):
failed_nums += 1
status_str = click.style("FAILED", fg="red")
elif result.get("succeeded") is None:
if not verbose:
continue
status_str = "IGNORED"
else:
succeeded_nums += 1
@ -261,3 +268,33 @@ def print_processing_summary(results):
is_error=failed_nums,
fg="red" if failed_nums else "green",
)
def print_target_list(envs):
tabular_data = []
for env, data in load_project_ide_data(os.getcwd(), envs).items():
tabular_data.extend(
sorted(
[
(
click.style(env, fg="cyan"),
t["group"],
click.style(t.get("name"), fg="yellow"),
t["title"],
t.get("description"),
)
for t in data.get("targets", [])
],
key=operator.itemgetter(1, 2),
)
)
tabular_data.append((None, None, None, None, None))
click.echo(
tabulate(
tabular_data,
headers=[
click.style(s, bold=True)
for s in ("Environment", "Group", "Name", "Title", "Description")
],
),
)
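A quick way to exercise the new `--list-targets` flag programmatically, assuming the command is importable as `platformio.commands.run.command:cli` (the module path is not shown in this diff) and that the current project defines a `uno` environment:

from click.testing import CliRunner

from platformio.commands.run.command import cli as cmd_run  # assumed module path

# run from inside a PlatformIO project directory
result = CliRunner().invoke(cmd_run, ["--list-targets", "-e", "uno"])
print(result.output)  # tabulated Environment / Group / Name / Title / Description rows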


@ -12,10 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio import exception, telemetry
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME
from platformio.managers.platform import PlatformFactory
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
from platformio.project.exception import UndefinedEnvPlatformError
# pylint: disable=too-many-instance-attributes
@ -62,21 +62,19 @@ class EnvironmentProcessor(object):
build_vars = self.get_build_variables()
build_targets = list(self.get_build_targets())
telemetry.send_run_environment(self.options, build_targets)
# skip monitor target, we call it above
if "monitor" in build_targets:
build_targets.remove("monitor")
try:
p = PlatformFactory.newPlatform(self.options["platform"])
except exception.UnknownPlatform:
p = PlatformFactory.new(self.options["platform"])
except UnknownPlatform:
self.cmd_ctx.invoke(
cmd_platform_install,
platforms=[self.options["platform"]],
skip_default_package=True,
)
p = PlatformFactory.newPlatform(self.options["platform"])
p = PlatformFactory.new(self.options["platform"])
result = p.run(build_vars, build_targets, self.silent, self.verbose, self.jobs)
return result["returncode"] == 0
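The same rename, as a stand-alone sketch of the new factory API (the platform name is illustrative):

from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory

try:
    p = PlatformFactory.new("atmelavr")  # formerly PlatformFactory.newPlatform(...)
except UnknownPlatform:
    # the processor above falls back to `platform install` and then retries
    raise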


@ -27,7 +27,7 @@ def format_value(raw):
return str(raw)
@click.group(short_help="Manage PlatformIO settings")
@click.group(short_help="Manage system settings")
def cli():
pass


@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


@ -0,0 +1,184 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import platform
import subprocess
import sys
import click
from tabulate import tabulate
from platformio import __version__, compat, fs, proc, util
from platformio.commands.system.completion import (
get_completion_install_path,
install_completion_code,
uninstall_completion_code,
)
from platformio.package.manager.library import LibraryPackageManager
from platformio.package.manager.platform import PlatformPackageManager
from platformio.package.manager.tool import ToolPackageManager
from platformio.project.config import ProjectConfig
from platformio.project.helpers import get_project_cache_dir
@click.group("system", short_help="Miscellaneous system commands")
def cli():
pass
@cli.command("info", short_help="Display system-wide information")
@click.option("--json-output", is_flag=True)
def system_info(json_output):
project_config = ProjectConfig()
data = {}
data["core_version"] = {"title": "PlatformIO Core", "value": __version__}
data["python_version"] = {
"title": "Python",
"value": "{0}.{1}.{2}-{3}.{4}".format(*list(sys.version_info)),
}
data["system"] = {"title": "System Type", "value": util.get_systype()}
data["platform"] = {"title": "Platform", "value": platform.platform(terse=True)}
data["filesystem_encoding"] = {
"title": "File System Encoding",
"value": compat.get_filesystem_encoding(),
}
data["locale_encoding"] = {
"title": "Locale Encoding",
"value": compat.get_locale_encoding(),
}
data["core_dir"] = {
"title": "PlatformIO Core Directory",
"value": project_config.get_optional_dir("core"),
}
data["platformio_exe"] = {
"title": "PlatformIO Core Executable",
"value": proc.where_is_program(
"platformio.exe" if proc.WINDOWS else "platformio"
),
}
data["python_exe"] = {
"title": "Python Executable",
"value": proc.get_pythonexe_path(),
}
data["global_lib_nums"] = {
"title": "Global Libraries",
"value": len(LibraryPackageManager().get_installed()),
}
data["dev_platform_nums"] = {
"title": "Development Platforms",
"value": len(PlatformPackageManager().get_installed()),
}
data["package_tool_nums"] = {
"title": "Tools & Toolchains",
"value": len(
ToolPackageManager(
project_config.get_optional_dir("packages")
).get_installed()
),
}
click.echo(
json.dumps(data)
if json_output
else tabulate([(item["title"], item["value"]) for item in data.values()])
)
@cli.command("prune", short_help="Remove unused data")
@click.option("--force", "-f", is_flag=True, help="Do not prompt for confirmation")
def system_prune(force):
click.secho("WARNING! This will remove:", fg="yellow")
click.echo(" - cached API requests")
click.echo(" - cached package downloads")
click.echo(" - temporary data")
if not force:
click.confirm("Do you want to continue?", abort=True)
reclaimed_total = 0
cache_dir = get_project_cache_dir()
if os.path.isdir(cache_dir):
reclaimed_total += fs.calculate_folder_size(cache_dir)
fs.rmtree(cache_dir)
click.secho(
"Total reclaimed space: %s" % fs.humanize_file_size(reclaimed_total), fg="green"
)
@cli.group("completion", short_help="Shell completion support")
def completion():
# pylint: disable=import-error,import-outside-toplevel
try:
import click_completion # pylint: disable=unused-import,unused-variable
except ImportError:
click.echo("Installing dependent packages...")
subprocess.check_call(
[proc.get_pythonexe_path(), "-m", "pip", "install", "click-completion"],
)
@completion.command("install", short_help="Install shell completion files/code")
@click.option(
"--shell",
default=None,
type=click.Choice(["fish", "bash", "zsh", "powershell", "auto"]),
help="The shell type, default=auto",
)
@click.option(
"--path",
type=click.Path(file_okay=True, dir_okay=False, readable=True, resolve_path=True),
help="Custom installation path of the code to be evaluated by the shell. "
"The standard installation path is used by default.",
)
def completion_install(shell, path):
import click_completion # pylint: disable=import-outside-toplevel,import-error
shell = shell or click_completion.get_auto_shell()
path = path or get_completion_install_path(shell)
install_completion_code(shell, path)
click.echo(
"PlatformIO CLI completion has been installed for %s shell to %s \n"
"Please restart a current shell session."
% (click.style(shell, fg="cyan"), click.style(path, fg="blue"))
)
@completion.command("uninstall", short_help="Uninstall shell completion files/code")
@click.option(
"--shell",
default=None,
type=click.Choice(["fish", "bash", "zsh", "powershell", "auto"]),
help="The shell type, default=auto",
)
@click.option(
"--path",
type=click.Path(file_okay=True, dir_okay=False, readable=True, resolve_path=True),
help="Custom installation path of the code to be evaluated by the shell. "
"The standard installation path is used by default.",
)
def completion_uninstall(shell, path):
import click_completion # pylint: disable=import-outside-toplevel,import-error
shell = shell or click_completion.get_auto_shell()
path = path or get_completion_install_path(shell)
uninstall_completion_code(shell, path)
click.echo(
"PlatformIO CLI completion has been uninstalled for %s shell from %s \n"
"Please restart a current shell session."
% (click.style(shell, fg="cyan"), click.style(path, fg="blue"))
)
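A minimal sketch of driving the new `system info` sub-command from Python. The module path is an assumption (only the completion helpers module is shown in this diff):

import json

from click.testing import CliRunner

from platformio.commands.system.command import cli as cmd_system  # assumed module path

result = CliRunner().invoke(cmd_system, ["info", "--json-output"])
info = json.loads(result.output)
print(info["core_version"]["value"], info["python_version"]["value"])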


@ -0,0 +1,73 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import click
def get_completion_install_path(shell):
home_dir = os.path.expanduser("~")
prog_name = click.get_current_context().find_root().info_name
if shell == "fish":
return os.path.join(
home_dir, ".config", "fish", "completions", "%s.fish" % prog_name
)
if shell == "bash":
return os.path.join(home_dir, ".bash_completion")
if shell == "zsh":
return os.path.join(home_dir, ".zshrc")
if shell == "powershell":
return subprocess.check_output(
["powershell", "-NoProfile", "echo $profile"]
).strip()
raise click.ClickException("%s is not supported." % shell)
def is_completion_code_installed(shell, path):
if shell == "fish" or not os.path.exists(path):
return False
import click_completion # pylint: disable=import-error,import-outside-toplevel
with open(path) as fp:
return click_completion.get_code(shell=shell) in fp.read()
def install_completion_code(shell, path):
import click_completion # pylint: disable=import-error,import-outside-toplevel
if is_completion_code_installed(shell, path):
return None
return click_completion.install(shell=shell, path=path, append=shell != "fish")
def uninstall_completion_code(shell, path):
if not os.path.exists(path):
return True
if shell == "fish":
os.remove(path)
return True
import click_completion # pylint: disable=import-error,import-outside-toplevel
with open(path, "r+") as fp:
contents = fp.read()
fp.seek(0)
fp.truncate()
fp.write(contents.replace(click_completion.get_code(shell=shell), ""))
return True

platformio/commands/team.py

@ -0,0 +1,212 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import re
import click
from tabulate import tabulate
from platformio.clients.account import AccountClient
def validate_orgname_teamname(value, teamname_validate=False):
if ":" not in value:
raise click.BadParameter(
"Please specify organization and team name in the next"
" format - orgname:teamname. For example, mycompany:DreamTeam"
)
teamname = str(value.strip().split(":", 1)[1])
if teamname_validate:
validate_teamname(teamname)
return value
def validate_teamname(value):
if not value:
return value
value = str(value).strip()
if not re.match(r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I):
raise click.BadParameter(
"Invalid team name format. "
"Team name must only contain alphanumeric characters, "
"single hyphens, underscores, spaces. It can not "
"begin or end with a hyphen or a underscore and must"
" not be longer than 20 characters."
)
return value
@click.group("team", short_help="Manage organization teams")
def cli():
pass
@cli.command("create", short_help="Create a new team")
@click.argument(
"orgname_teamname",
metavar="ORGNAME:TEAMNAME",
callback=lambda _, __, value: validate_orgname_teamname(
value, teamname_validate=True
),
)
@click.option(
"--description",
)
def team_create(orgname_teamname, description):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
client.create_team(orgname, teamname, description)
return click.secho(
"The team %s has been successfully created." % teamname,
fg="green",
)
@cli.command("list", short_help="List teams")
@click.argument("orgname", required=False)
@click.option("--json-output", is_flag=True)
def team_list(orgname, json_output):
client = AccountClient()
data = {}
if not orgname:
for item in client.list_orgs():
teams = client.list_teams(item.get("orgname"))
data[item.get("orgname")] = teams
else:
teams = client.list_teams(orgname)
data[orgname] = teams
if json_output:
return click.echo(json.dumps(data[orgname] if orgname else data))
if not any(data.values()):
return click.secho("You do not have any teams.", fg="yellow")
for org_name in data:
for team in data[org_name]:
click.echo()
click.secho("%s:%s" % (org_name, team.get("name")), fg="cyan")
click.echo("-" * len("%s:%s" % (org_name, team.get("name"))))
table_data = []
if team.get("description"):
table_data.append(("Description:", team.get("description")))
table_data.append(
(
"Members:",
", ".join(
(member.get("username") for member in team.get("members"))
)
if team.get("members")
else "-",
)
)
click.echo(tabulate(table_data, tablefmt="plain"))
return click.echo()
@cli.command("update", short_help="Update team")
@click.argument(
"orgname_teamname",
metavar="ORGNAME:TEAMNAME",
callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.option(
"--name",
callback=lambda _, __, value: validate_teamname(value),
help="A new team name",
)
@click.option(
"--description",
)
def team_update(orgname_teamname, **kwargs):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
team = client.get_team(orgname, teamname)
del team["id"]
del team["members"]
new_team = team.copy()
if not any(kwargs.values()):
for field in team:
new_team[field] = click.prompt(
field.replace("_", " ").capitalize(), default=team[field]
)
if field == "name":
validate_teamname(new_team[field])
else:
new_team.update({key: value for key, value in kwargs.items() if value})
client.update_team(orgname, teamname, new_team)
return click.secho(
"The team %s has been successfully updated." % teamname,
fg="green",
)
@cli.command("destroy", short_help="Destroy a team")
@click.argument(
"orgname_teamname",
metavar="ORGNAME:TEAMNAME",
callback=lambda _, __, value: validate_orgname_teamname(value),
)
def team_destroy(orgname_teamname):
orgname, teamname = orgname_teamname.split(":", 1)
click.confirm(
click.style(
"Are you sure you want to destroy the %s team?" % teamname, fg="yellow"
),
abort=True,
)
client = AccountClient()
client.destroy_team(orgname, teamname)
return click.secho(
"The team %s has been successfully destroyed." % teamname,
fg="green",
)
@cli.command("add", short_help="Add a new member to team")
@click.argument(
"orgname_teamname",
metavar="ORGNAME:TEAMNAME",
callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.argument(
"username",
)
def team_add_member(orgname_teamname, username):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
client.add_team_member(orgname, teamname, username)
return click.secho(
"The new member %s has been successfully added to the %s team."
% (username, teamname),
fg="green",
)
@cli.command("remove", short_help="Remove a member from team")
@click.argument(
"orgname_teamname",
metavar="ORGNAME:TEAMNAME",
callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.argument("username")
def team_remove_owner(orgname_teamname, username):
orgname, teamname = orgname_teamname.split(":", 1)
client = AccountClient()
client.remove_team_member(orgname, teamname, username)
return click.secho(
"The %s member has been successfully removed from the %s team."
% (username, teamname),
fg="green",
)
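Illustrative checks against the team-name rules enforced by `validate_teamname` above:

import click

validate_teamname("Dream-Team")       # accepted: alphanumerics plus single hyphens/underscores/spaces
try:
    validate_teamname("-dreamteam")   # rejected: a name may not begin with a hyphen
except click.BadParameter as exc:
    print(exc)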


@ -28,7 +28,7 @@ from platformio.commands.test.native import NativeTestProcessor
from platformio.project.config import ProjectConfig
@click.command("test", short_help="Unit Testing")
@click.command("test", short_help="Unit testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option(
"--filter",


@ -19,7 +19,7 @@ import serial
from platformio import exception, util
from platformio.commands.test.processor import TestProcessorBase
from platformio.managers.platform import PlatformFactory
from platformio.platform.factory import PlatformFactory
class EmbeddedTestProcessor(TestProcessorBase):
@ -108,7 +108,7 @@ class EmbeddedTestProcessor(TestProcessorBase):
return self.env_options.get("test_port")
assert set(["platform", "board"]) & set(self.env_options.keys())
p = PlatformFactory.newPlatform(self.env_options["platform"])
p = PlatformFactory.new(self.env_options["platform"])
board_hwids = p.board_config(self.env_options["board"]).get("build.hwids", [])
port = None
elapsed = 0


@ -13,7 +13,7 @@
# limitations under the License.
import atexit
from os import remove
from os import listdir, remove
from os.path import isdir, isfile, join
from string import Template
@ -25,42 +25,59 @@ TRANSPORT_OPTIONS = {
"arduino": {
"include": "#include <Arduino.h>",
"object": "",
"putchar": "Serial.write(c)",
"flush": "Serial.flush()",
"begin": "Serial.begin($baudrate)",
"end": "Serial.end()",
"putchar": "Serial.write(c);",
"flush": "Serial.flush();",
"begin": "Serial.begin($baudrate);",
"end": "Serial.end();",
"language": "cpp",
},
"mbed": {
"include": "#include <mbed.h>",
"object": "Serial pc(USBTX, USBRX);",
"putchar": "pc.putc(c)",
"object": (
"#if MBED_MAJOR_VERSION == 6\nUnbufferedSerial pc(USBTX, USBRX);\n"
"#else\nRawSerial pc(USBTX, USBRX);\n#endif"
),
"putchar": (
"#if MBED_MAJOR_VERSION == 6\npc.write(&c, 1);\n"
"#else\npc.putc(c);\n#endif"
),
"flush": "",
"begin": "pc.baud($baudrate)",
"begin": "pc.baud($baudrate);",
"end": "",
"language": "cpp",
},
"espidf": {
"include": "#include <stdio.h>",
"object": "",
"putchar": "putchar(c)",
"flush": "fflush(stdout)",
"putchar": "putchar(c);",
"flush": "fflush(stdout);",
"begin": "",
"end": "",
},
"zephyr": {
"include": "#include <sys/printk.h>",
"object": "",
"putchar": 'printk("%c", c);',
"flush": "",
"begin": "",
"end": "",
},
"native": {
"include": "#include <stdio.h>",
"object": "",
"putchar": "putchar(c)",
"flush": "fflush(stdout)",
"putchar": "putchar(c);",
"flush": "fflush(stdout);",
"begin": "",
"end": "",
},
"custom": {
"include": '#include "unittest_transport.h"',
"object": "",
"putchar": "unittest_uart_putchar(c)",
"flush": "unittest_uart_flush()",
"begin": "unittest_uart_begin()",
"end": "unittest_uart_end()",
"putchar": "unittest_uart_putchar(c);",
"flush": "unittest_uart_flush();",
"begin": "unittest_uart_begin();",
"end": "unittest_uart_end();",
"language": "cpp",
},
}
@ -80,7 +97,7 @@ class TestProcessorBase(object):
self.env_name = envname
self.env_options = options["project_config"].items(env=envname, as_dict=True)
self._run_failed = False
self._outputcpp_generated = False
self._output_file_generated = False
def get_transport(self):
transport = None
@ -105,11 +122,11 @@ class TestProcessorBase(object):
click.secho(text, bold=self.options.get("verbose"))
def build_or_upload(self, target):
if not self._outputcpp_generated:
self.generate_outputcpp(
if not self._output_file_generated:
self.generate_output_file(
self.options["project_config"].get_optional_dir("test")
)
self._outputcpp_generated = True
self._output_file_generated = True
if self.test_name != "*":
self.cmd_ctx.meta[CTX_META_TEST_RUNNING_NAME] = self.test_name
@ -121,6 +138,7 @@ class TestProcessorBase(object):
return self.cmd_ctx.invoke(
cmd_run,
project_dir=self.options["project_dir"],
project_conf=self.options["project_config"].path,
upload_port=self.options["upload_port"],
verbose=self.options["verbose"],
silent=self.options["silent"],
@ -147,10 +165,10 @@ class TestProcessorBase(object):
else:
click.echo(line)
def generate_outputcpp(self, test_dir):
def generate_output_file(self, test_dir):
assert isdir(test_dir)
cpp_tpl = "\n".join(
file_tpl = "\n".join(
[
"$include",
"#include <output_export.h>",
@ -163,42 +181,50 @@ class TestProcessorBase(object):
"void output_start(unsigned int baudrate)",
"#endif",
"{",
" $begin;",
" $begin",
"}",
"",
"void output_char(int c)",
"{",
" $putchar;",
" $putchar",
"}",
"",
"void output_flush(void)",
"{",
" $flush;",
" $flush",
"}",
"",
"void output_complete(void)",
"{",
" $end;",
" $end",
"}",
]
)
def delete_tmptest_file(file_):
try:
remove(file_)
except: # pylint: disable=bare-except
if isfile(file_):
click.secho(
"Warning: Could not remove temporary file '%s'. "
"Please remove it manually." % file_,
fg="yellow",
)
tmp_file_prefix = "tmp_pio_test_transport"
tpl = Template(cpp_tpl).substitute(TRANSPORT_OPTIONS[self.get_transport()])
def delete_tmptest_files(test_dir):
for item in listdir(test_dir):
if item.startswith(tmp_file_prefix) and isfile(join(test_dir, item)):
try:
remove(join(test_dir, item))
except: # pylint: disable=bare-except
click.secho(
"Warning: Could not remove temporary file '%s'. "
"Please remove it manually." % join(test_dir, item),
fg="yellow",
)
transport_options = TRANSPORT_OPTIONS[self.get_transport()]
tpl = Template(file_tpl).substitute(transport_options)
data = Template(tpl).substitute(baudrate=self.get_baudrate())
tmp_file = join(test_dir, "output_export.cpp")
delete_tmptest_files(test_dir)
tmp_file = join(
test_dir,
"%s.%s" % (tmp_file_prefix, transport_options.get("language", "c")),
)
with open(tmp_file, "w") as fp:
fp.write(data)
atexit.register(delete_tmptest_file, tmp_file)
atexit.register(delete_tmptest_files, test_dir)
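A stand-alone illustration of the two-stage `string.Template` substitution performed by `generate_output_file` above, reduced to a single snippet (the baud rate is illustrative):

from string import Template

snippet = "\n".join(
    [
        "$include",
        "void output_start(unsigned int baudrate)",
        "{",
        "    $begin",
        "}",
    ]
)
stage1 = Template(snippet).substitute(TRANSPORT_OPTIONS["arduino"])  # inserts "Serial.begin($baudrate);"
print(Template(stage1).substitute(baudrate=115200))                  # then resolves the baud rate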


@ -14,12 +14,12 @@
import click
from platformio import app
from platformio.commands.lib import CTX_META_STORAGE_DIRS_KEY
from platformio.commands.lib import lib_update as cmd_lib_update
from platformio.cache import cleanup_content_cache
from platformio.commands.lib.command import CTX_META_STORAGE_DIRS_KEY
from platformio.commands.lib.command import lib_update as cmd_lib_update
from platformio.commands.platform import platform_update as cmd_platform_update
from platformio.managers.core import update_core_packages
from platformio.managers.lib import LibraryManager
from platformio.package.manager.core import update_core_packages
from platformio.package.manager.library import LibraryPackageManager
@click.command(
@ -38,7 +38,7 @@ from platformio.managers.lib import LibraryManager
@click.pass_context
def cli(ctx, core_packages, only_check, dry_run):
# cleanup lib search results, cached board and platform lists
app.clean_cache()
cleanup_content_cache("http")
only_check = dry_run or only_check
@ -55,5 +55,5 @@ def cli(ctx, core_packages, only_check, dry_run):
click.echo()
click.echo("Library Manager")
click.echo("===============")
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = [LibraryManager().package_dir]
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = [LibraryPackageManager().package_dir]
ctx.invoke(cmd_lib_update, only_check=only_check)


@ -12,14 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import re
from zipfile import ZipFile
import click
import requests
from platformio import VERSION, __version__, app, exception, util
from platformio import VERSION, __version__, app, exception
from platformio.clients.http import fetch_remote_content
from platformio.compat import WINDOWS
from platformio.proc import exec_command, get_pythonexe_path
from platformio.project.helpers import get_project_cache_dir
@ -130,13 +131,11 @@ def get_latest_version():
def get_develop_latest_version():
version = None
r = requests.get(
content = fetch_remote_content(
"https://raw.githubusercontent.com/platformio/platformio"
"/develop/platformio/__init__.py",
headers=util.get_request_defheaders(),
"/develop/platformio/__init__.py"
)
r.raise_for_status()
for line in r.text.split("\n"):
for line in content.split("\n"):
line = line.strip()
if not line.startswith("VERSION"):
continue
@ -152,8 +151,5 @@ def get_develop_latest_version():
def get_pypi_latest_version():
r = requests.get(
"https://pypi.org/pypi/platformio/json", headers=util.get_request_defheaders()
)
r.raise_for_status()
return r.json()["info"]["version"]
content = fetch_remote_content("https://pypi.org/pypi/platformio/json")
return json.loads(content)["info"]["version"]


@ -13,8 +13,9 @@
# limitations under the License.
# pylint: disable=unused-import, no-name-in-module, import-error,
# pylint: disable=no-member, undefined-variable
# pylint: disable=no-member, undefined-variable, unexpected-keyword-arg
import glob
import inspect
import json
import locale
@ -22,9 +23,12 @@ import os
import re
import sys
from platformio.exception import UserSideException
PY2 = sys.version_info[0] == 2
CYGWIN = sys.platform.startswith("cygwin")
WINDOWS = sys.platform.startswith("win")
MACOS = sys.platform.startswith("darwin")
def get_filesystem_encoding():
@ -49,6 +53,25 @@ def get_object_members(obj, ignore_private=True):
}
def ci_strings_are_equal(a, b):
if a == b:
return True
if not a or not b:
return False
return a.strip().lower() == b.strip().lower()
def ensure_python3(raise_exception=True):
if not raise_exception or not PY2:
return not PY2
raise UserSideException(
"Python 3.5 or later is required for this operation. \n"
"Please install the latest Python 3 and reinstall PlatformIO Core using "
"installation script:\n"
"https://docs.platformio.org/page/core/installation.html"
)
if PY2:
import imp
@ -75,12 +98,15 @@ if PY2:
if isinstance(obj, unicode):
return obj
return json.dumps(
obj, encoding=get_filesystem_encoding(), ensure_ascii=False, sort_keys=True
obj, encoding=get_filesystem_encoding(), ensure_ascii=False
).encode("utf8")
_magic_check = re.compile("([*?[])")
_magic_check_bytes = re.compile(b"([*?[])")
def glob_recursive(pathname):
return glob.glob(pathname)
def glob_escape(pathname):
"""Escape all special characters."""
# https://github.com/python/cpython/blob/master/Lib/glob.py#L161
@ -120,7 +146,10 @@ else:
def dump_json_to_unicode(obj):
if isinstance(obj, string_types):
return obj
return json.dumps(obj, ensure_ascii=False, sort_keys=True)
return json.dumps(obj)
def glob_recursive(pathname):
return glob.glob(pathname, recursive=True)
def load_python_module(name, pathname):
spec = importlib.util.spec_from_file_location(name, pathname)


@ -30,10 +30,6 @@ class ReturnErrorCode(PlatformioException):
MESSAGE = "{0}"
class LockFileTimeoutError(PlatformioException):
pass
class MinitermException(PlatformioException):
pass
@ -47,141 +43,6 @@ class AbortedByUser(UserSideException):
MESSAGE = "Aborted by user"
#
# Development Platform
#
class UnknownPlatform(PlatformioException):
MESSAGE = "Unknown development platform '{0}'"
class IncompatiblePlatform(PlatformioException):
MESSAGE = "Development platform '{0}' is not compatible with PIO Core v{1}"
class PlatformNotInstalledYet(PlatformioException):
MESSAGE = (
"The platform '{0}' has not been installed yet. "
"Use `platformio platform install {0}` command"
)
class UnknownBoard(PlatformioException):
MESSAGE = "Unknown board ID '{0}'"
class InvalidBoardManifest(PlatformioException):
MESSAGE = "Invalid board JSON manifest '{0}'"
class UnknownFramework(PlatformioException):
MESSAGE = "Unknown framework '{0}'"
# Package Manager
class PlatformIOPackageException(PlatformioException):
pass
class UnknownPackage(PlatformIOPackageException):
MESSAGE = "Detected unknown package '{0}'"
class MissingPackageManifest(PlatformIOPackageException):
MESSAGE = "Could not find one of '{0}' manifest files in the package"
class UndefinedPackageVersion(PlatformIOPackageException):
MESSAGE = (
"Could not find a version that satisfies the requirement '{0}'"
" for your system '{1}'"
)
class PackageInstallError(PlatformIOPackageException):
MESSAGE = (
"Could not install '{0}' with version requirements '{1}' "
"for your system '{2}'.\n\n"
"Please try this solution -> http://bit.ly/faq-package-manager"
)
class ExtractArchiveItemError(PlatformIOPackageException):
MESSAGE = (
"Could not extract `{0}` to `{1}`. Try to disable antivirus "
"tool or check this solution -> http://bit.ly/faq-package-manager"
)
class UnsupportedArchiveType(PlatformIOPackageException):
MESSAGE = "Can not unpack file '{0}'"
class FDUnrecognizedStatusCode(PlatformIOPackageException):
MESSAGE = "Got an unrecognized status code '{0}' when downloaded {1}"
class FDSizeMismatch(PlatformIOPackageException):
MESSAGE = (
"The size ({0:d} bytes) of downloaded file '{1}' "
"is not equal to remote size ({2:d} bytes)"
)
class FDSHASumMismatch(PlatformIOPackageException):
MESSAGE = (
"The 'sha1' sum '{0}' of downloaded file '{1}' is not equal to remote '{2}'"
)
#
# Library
#
class LibNotFound(PlatformioException):
MESSAGE = (
"Library `{0}` has not been found in PlatformIO Registry.\n"
"You can ignore this message, if `{0}` is a built-in library "
"(included in framework, SDK). E.g., SPI, Wire, etc."
)
class NotGlobalLibDir(UserSideException):
MESSAGE = (
"The `{0}` is not a PlatformIO project.\n\n"
"To manage libraries in global storage `{1}`,\n"
"please use `platformio lib --global {2}` or specify custom storage "
"`platformio lib --storage-dir /path/to/storage/ {2}`.\n"
"Check `platformio lib --help` for details."
)
class InvalidLibConfURL(PlatformioException):
MESSAGE = "Invalid library config URL '{0}'"
#
# UDEV Rules
#
@ -194,8 +55,8 @@ class InvalidUdevRules(PlatformioException):
class MissedUdevRules(InvalidUdevRules):
MESSAGE = (
"Warning! Please install `99-platformio-udev.rules`. \nMode details: "
"https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules"
"Warning! Please install `99-platformio-udev.rules`. \nMore details: "
"https://docs.platformio.org/page/faq.html#platformio-udev-rules"
)
@ -203,8 +64,8 @@ class OutdatedUdevRules(InvalidUdevRules):
MESSAGE = (
"Warning! Your `{0}` are outdated. Please update or reinstall them."
"\n Mode details: https://docs.platformio.org"
"/en/latest/faq.html#platformio-udev-rules"
"\nMore details: "
"https://docs.platformio.org/page/faq.html#platformio-udev-rules"
)
@ -223,31 +84,12 @@ class GetLatestVersionError(PlatformioException):
MESSAGE = "Can not retrieve the latest PlatformIO version"
class APIRequestError(PlatformioException):
MESSAGE = "[API] {0}"
class InternetIsOffline(UserSideException):
MESSAGE = (
"You are not connected to the Internet.\n"
"If you build a project first time, we need Internet connection "
"to install all dependencies and toolchains."
)
class BuildScriptNotFound(PlatformioException):
MESSAGE = "Invalid path '{0}' to build script"
class InvalidSettingName(PlatformioException):
class InvalidSettingName(UserSideException):
MESSAGE = "Invalid setting with the name '{0}'"
class InvalidSettingValue(PlatformioException):
class InvalidSettingValue(UserSideException):
MESSAGE = "Invalid value '{0}' for the setting '{1}'"
@ -257,7 +99,7 @@ class InvalidJSONFile(PlatformioException):
MESSAGE = "Could not load broken JSON: {0}"
class CIBuildEnvsEmpty(PlatformioException):
class CIBuildEnvsEmpty(UserSideException):
MESSAGE = (
"Can't find PlatformIO build environments.\n"
@ -295,7 +137,7 @@ class CygwinEnvDetected(PlatformioException):
)
class TestDirNotExists(PlatformioException):
class TestDirNotExists(UserSideException):
MESSAGE = (
"A test folder '{0}' does not exist.\nPlease create 'test' "


@ -12,18 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import io
import json
import os
import re
import shutil
import stat
import sys
from glob import glob
import click
from platformio import exception
from platformio.compat import WINDOWS, glob_escape
from platformio.compat import WINDOWS, glob_escape, glob_recursive
class cd(object):
@ -56,7 +57,7 @@ def load_json(file_path):
raise exception.InvalidJSONFile(file_path)
def format_filesize(filesize):
def humanize_file_size(filesize):
base = 1024
unit = 0
suffix = "B"
@ -73,6 +74,28 @@ def format_filesize(filesize):
return "%d%sB" % ((base * filesize / unit), suffix)
def calculate_file_hashsum(algorithm, path):
h = hashlib.new(algorithm)
with io.open(path, "rb", buffering=0) as fp:
while True:
chunk = fp.read(io.DEFAULT_BUFFER_SIZE)
if not chunk:
break
h.update(chunk)
return h.hexdigest()
def calculate_folder_size(path):
assert os.path.isdir(path)
result = 0
for root, __, files in os.walk(path):
for f in files:
file_path = os.path.join(root, f)
if not os.path.islink(file_path):
result += os.path.getsize(file_path)
return result
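A small sketch of the new filesystem helpers in use (paths are placeholders):

print(calculate_file_hashsum("sha256", "platformio.ini"))  # hex digest of the file contents
size = calculate_folder_size(".pio")                       # total bytes, symlinks excluded
print(humanize_file_size(size))                            # e.g. "1.25MB"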
def ensure_udev_rules():
from platformio.util import get_systype # pylint: disable=import-outside-toplevel
@ -135,7 +158,7 @@ def match_src_files(src_dir, src_filter=None, src_exts=None, followlinks=True):
src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep)
for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
items = set()
for item in glob(os.path.join(glob_escape(src_dir), pattern)):
for item in glob_recursive(os.path.join(glob_escape(src_dir), pattern)):
if os.path.isdir(item):
for root, _, files in os.walk(item, followlinks=followlinks):
for f in files:
@ -164,6 +187,10 @@ def expanduser(path):
return os.environ["USERPROFILE"] + path[1:]
def change_filemtime(path, mtime):
os.utime(path, (mtime, mtime))
def rmtree(path):
def _onerror(func, path, __):
try:


@ -15,58 +15,56 @@
import codecs
import os
import sys
from os.path import basename, isdir, isfile, join, realpath, relpath
import bottle
from platformio import fs, util
from platformio.proc import where_is_program
from platformio.project.config import ProjectConfig
from platformio.project.helpers import load_project_ide_data
class ProjectGenerator(object):
def __init__(self, project_dir, ide, boards):
self.config = ProjectConfig.get_instance(join(project_dir, "platformio.ini"))
self.config.validate()
self.project_dir = project_dir
def __init__(self, config, env_name, ide):
self.config = config
self.project_dir = os.path.dirname(config.path)
self.env_name = str(env_name)
self.ide = str(ide)
self.env_name = str(self.get_best_envname(boards))
@staticmethod
def get_supported_ides():
tpls_dir = join(fs.get_source_dir(), "ide", "tpls")
return sorted([d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))])
tpls_dir = os.path.join(fs.get_source_dir(), "ide", "tpls")
return sorted(
[
d
for d in os.listdir(tpls_dir)
if os.path.isdir(os.path.join(tpls_dir, d))
]
)
def get_best_envname(self, boards=None):
envname = None
default_envs = self.config.default_envs()
if default_envs:
envname = default_envs[0]
if not boards:
return envname
for env in self.config.envs():
if not boards:
return env
if not envname:
envname = env
items = self.config.items(env=env, as_dict=True)
if "board" in items and items.get("board") in boards:
return env
return envname
@staticmethod
def filter_includes(includes_map, ignore_scopes=None, to_unix_path=True):
ignore_scopes = ignore_scopes or []
result = []
for scope, includes in includes_map.items():
if scope in ignore_scopes:
continue
for include in includes:
if to_unix_path:
include = fs.to_unix_path(include)
if include not in result:
result.append(include)
return result
def _load_tplvars(self):
tpl_vars = {
"config": self.config,
"systype": util.get_systype(),
"project_name": basename(self.project_dir),
"project_name": os.path.basename(self.project_dir),
"project_dir": self.project_dir,
"env_name": self.env_name,
"user_home_dir": realpath(fs.expanduser("~")),
"user_home_dir": os.path.realpath(fs.expanduser("~")),
"platformio_path": sys.argv[0]
if isfile(sys.argv[0])
if os.path.isfile(sys.argv[0])
else where_is_program("platformio"),
"env_path": os.getenv("PATH"),
"env_pathsep": os.pathsep,
@ -83,7 +81,7 @@ class ProjectGenerator(object):
"src_files": self.get_src_files(),
"project_src_dir": self.config.get_optional_dir("src"),
"project_lib_dir": self.config.get_optional_dir("lib"),
"project_libdeps_dir": join(
"project_libdeps_dir": os.path.join(
self.config.get_optional_dir("libdeps"), self.env_name
),
}
@ -92,12 +90,13 @@ class ProjectGenerator(object):
for key, value in tpl_vars.items():
if key.endswith(("_path", "_dir")):
tpl_vars[key] = fs.to_unix_path(value)
for key in ("includes", "src_files", "libsource_dirs"):
for key in ("src_files", "libsource_dirs"):
if key not in tpl_vars:
continue
tpl_vars[key] = [fs.to_unix_path(inc) for inc in tpl_vars[key]]
tpl_vars["to_unix_path"] = fs.to_unix_path
tpl_vars["filter_includes"] = self.filter_includes
return tpl_vars
def get_src_files(self):
@ -105,12 +104,12 @@ class ProjectGenerator(object):
with fs.cd(self.project_dir):
for root, _, files in os.walk(self.config.get_optional_dir("src")):
for f in files:
result.append(relpath(join(root, f)))
result.append(os.path.relpath(os.path.join(root, f)))
return result
def get_tpls(self):
tpls = []
tpls_dir = join(fs.get_source_dir(), "ide", "tpls", self.ide)
tpls_dir = os.path.join(fs.get_source_dir(), "ide", "tpls", self.ide)
for root, _, files in os.walk(tpls_dir):
for f in files:
if not f.endswith(".tpl"):
@ -118,7 +117,7 @@ class ProjectGenerator(object):
_relpath = root.replace(tpls_dir, "")
if _relpath.startswith(os.sep):
_relpath = _relpath[1:]
tpls.append((_relpath, join(root, f)))
tpls.append((_relpath, os.path.join(root, f)))
return tpls
def generate(self):
@ -126,21 +125,21 @@ class ProjectGenerator(object):
for tpl_relpath, tpl_path in self.get_tpls():
dst_dir = self.project_dir
if tpl_relpath:
dst_dir = join(self.project_dir, tpl_relpath)
if not isdir(dst_dir):
dst_dir = os.path.join(self.project_dir, tpl_relpath)
if not os.path.isdir(dst_dir):
os.makedirs(dst_dir)
file_name = basename(tpl_path)[:-4]
file_name = os.path.basename(tpl_path)[:-4]
contents = self._render_tpl(tpl_path, tpl_vars)
self._merge_contents(join(dst_dir, file_name), contents)
self._merge_contents(os.path.join(dst_dir, file_name), contents)
@staticmethod
def _render_tpl(tpl_path, tpl_vars):
with codecs.open(tpl_path, "r", encoding="utf8") as fp:
return bottle.SimpleTemplate(fp.read()).render(**tpl_vars)
return bottle.template(fp.read(), **tpl_vars)
@staticmethod
def _merge_contents(dst_path, contents):
if basename(dst_path) == ".gitignore" and isfile(dst_path):
if os.path.basename(dst_path) == ".gitignore" and os.path.isfile(dst_path):
return
with codecs.open(dst_path, "w", encoding="utf8") as fp:
fp.write(contents)
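A minimal sketch of the reworked constructor, which now takes an already-validated `ProjectConfig` plus an explicit environment name; the project path, environment fallback, and IDE name below are placeholders:

import os

from platformio.project.config import ProjectConfig

project_dir = "/path/to/project"  # placeholder
config = ProjectConfig.get_instance(os.path.join(project_dir, "platformio.ini"))
config.validate()
env_name = (config.default_envs() or config.envs())[0]  # real callers use get_best_envname()
ProjectGenerator(config, env_name, "vscode").generate()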


@ -1,4 +1,4 @@
% for include in includes:
% for include in filter_includes(includes):
-I{{include}}
% end
% for define in defines:


@ -4,6 +4,6 @@
"gccDefaultCFlags": "-fsyntax-only {{! to_unix_path(cc_flags).replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
"gccDefaultCppFlags": "-fsyntax-only {{! to_unix_path(cxx_flags).replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
"gccErrorLimit": 15,
"gccIncludePaths": "{{ ','.join(includes) }}",
"gccIncludePaths": "{{ ','.join(filter_includes(includes)) }}",
"gccSuppressWarnings": false
}


@ -5,10 +5,12 @@
# please create `CMakeListsUser.txt` in the root of project.
# The `CMakeListsUser.txt` will not be overwritten by PlatformIO.
% from platformio.project.helpers import (load_project_ide_data)
%
% import os
% import re
%
% from platformio.compat import WINDOWS
% from platformio.project.helpers import (load_project_ide_data)
%
% def _normalize_path(path):
% if project_dir in path:
% path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
@ -22,12 +24,30 @@
% return path
% end
%
% def _fix_lib_dirs(lib_dirs):
% result = []
% for lib_dir in lib_dirs:
% if not os.path.isabs(lib_dir):
% lib_dir = os.path.join(project_dir, lib_dir)
% end
% result.append(to_unix_path(os.path.normpath(lib_dir)))
% end
% return result
% end
%
% def _escape(text):
% return to_unix_path(text).replace('"', '\\"')
% end
%
% def _get_lib_dirs(envname):
% env_libdeps_dir = os.path.join(config.get_optional_dir("libdeps"), envname)
% env_lib_extra_dirs = config.get("env:" + envname, "lib_extra_dirs", [])
% return _fix_lib_dirs([env_libdeps_dir] + env_lib_extra_dirs)
% end
%
% envs = config.envs()
% if len(envs) > 1:
set(CMAKE_CONFIGURATION_TYPES "{{ ";".join(envs) }};" CACHE STRING "Build Types reflect PlatformIO Environments" FORCE)
% else:
@ -43,7 +63,7 @@ SET(CMAKE_CXX_COMPILER "{{ _normalize_path(cxx_path) }}")
SET(CMAKE_CXX_FLAGS "{{ _normalize_path(to_unix_path(cxx_flags)) }}")
SET(CMAKE_C_FLAGS "{{ _normalize_path(to_unix_path(cc_flags)) }}")
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
% STD_RE = re.compile(r"\-std=[a-z\+]+(\w+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
% if cc_stds:
@ -54,13 +74,19 @@ set(CMAKE_CXX_STANDARD {{ cxx_stds[-1] }})
% end
if (CMAKE_BUILD_TYPE MATCHES "{{ env_name }}")
%for define in defines:
% for define in defines:
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
%end
% end
%for include in includes:
include_directories("{{ _normalize_path(to_unix_path(include)) }}")
%end
% for include in filter_includes(includes):
include_directories("{{ _normalize_path(include) }}")
% end
FILE(GLOB_RECURSE EXTRA_LIB_SOURCES
% for dir in _get_lib_dirs(env_name):
{{ _normalize_path(dir) + "/*.*" }}
% end
)
endif()
% leftover_envs = list(set(envs) ^ set([env_name]))
@ -76,9 +102,22 @@ if (CMAKE_BUILD_TYPE MATCHES "{{ env }}")
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
% end
% for include in data["includes"]:
% for include in filter_includes(data["includes"]):
include_directories("{{ _normalize_path(to_unix_path(include)) }}")
% end
FILE(GLOB_RECURSE EXTRA_LIB_SOURCES
% for dir in _get_lib_dirs(env):
{{ _normalize_path(dir) + "/*.*" }}
% end
)
endif()
% end
FILE(GLOB_RECURSE SRC_LIST "{{ _normalize_path(project_src_dir) }}/*.*" "{{ _normalize_path(project_lib_dir) }}/*.*" "{{ _normalize_path(project_libdeps_dir) }}/*.*")
FILE(GLOB_RECURSE SRC_LIST
% for path in (project_src_dir, project_lib_dir):
{{ _normalize_path(path) + "/*.*" }}
% end
)
list(APPEND SRC_LIST ${EXTRA_LIB_SOURCES})

Some files were not shown because too many files have changed in this diff.