Compare commits

581 Commits

Author SHA1 Message Date
Ivan Kravets
e37d34b92f Merge branch 'release/3.6.5' 2019-03-07 14:00:57 +02:00
Ivan Kravets
b562541f20 Bump version to 3.6.5 2019-03-07 13:57:32 +02:00
Ivan Kravets
736a1404b4 YAPF 0.26.0 2019-03-07 12:55:03 +02:00
Ivan Kravets
9639626ab3 Fix an issue when `$PROJECT_HASH` template was not expanded for the other directory `*_dir` options in "platformio.ini" // Resolve #2170 2019-03-07 12:54:40 +02:00
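For context, a minimal sketch of how the `$PROJECT_HASH` template variable is typically used for a directory option in "platformio.ini" (the section contents and path below are illustrative, not taken from this changeset):

    [platformio]
    ; keep build artifacts outside the project tree, one folder per project
    build_dir = /tmp/pio-builds/$PROJECT_HASH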
Ivan Kravets
b99494671a Update "dl.bintray.com" IP address 2019-03-06 22:39:38 +02:00
Ivan Kravets
9778778830 PyLint fix 2019-02-24 11:45:29 +02:00
Ivan Kravets
d2c2171ef9 Project Generator: add new targets for CLion IDE "BUILD_VERBOSE" and "MONITOR" (serial port monitor) // Resolve #359 2019-02-23 22:43:30 +02:00
Ivan Kravets
856798488b Fix an issue when platformio ci recompiles project if `--keep-build-dir` option is passed // Resolve #2109 2019-02-23 22:41:45 +02:00
Ivan Kravets
00ba88911f Fix an issue with slow updating of PlatformIO Core packages on Windows 2019-02-23 22:41:15 +02:00
Ivan Kravets
639c086728 Bump version to 3.6.5a1 2019-01-23 21:08:39 +02:00
Ivan Kravets
4504080027 Merge branch 'release/v3.6.4' 2019-01-23 20:51:48 +02:00
Ivan Kravets
367e4d663c Bump version to 3.6.4 2019-01-23 20:47:18 +02:00
Ivan Kravets
28bca48eca Ignore examples for ststm8 on Linux 2019-01-23 17:55:07 +02:00
Ivan Kravets
69065d8bd6 Fix "ValueError: invalid literal for int() with base 10" // Resolve #2058 2019-01-22 22:16:58 +02:00
Ivan Kravets
db0bbcc043 CLion: Improve project portability using "${CMAKE_CURRENT_LIST_DIR}" instead of USER_HOME 2019-01-17 18:05:05 +02:00
Ivan Kravets
b594c11718 Fix cmd_lib test 2019-01-17 17:56:35 +02:00
Ivan Kravets
d627a42268 Fix PyLint warning 2019-01-11 14:07:46 +02:00
Ivan Kravets
f058b8f18f Fix PY3 Lint "consider-using-set-comprehension" 2019-01-11 13:02:12 +02:00
Ivan Kravets
13430aa628 Use GCC C++ compiler for Eclipse project indexer // Issue #1010 2019-01-11 12:52:17 +02:00
Ivan Kravets
56cd55ba7d Eclipse: Provide language standard to a project C/C++ indexer // Resolve #1010 2019-01-10 21:57:52 +02:00
Ivan Kravets
19b5285d50 Fix "TypeError : startswith first arg" when checking udev rules with PY3 // Resolve #2000 2019-01-10 19:34:09 +02:00
Ivan Kravets
d5d95092c4 Fix an error "Could not extract item..." when extracting TAR archive with symbolic items on Windows platform // Resolve #2015 2019-01-10 19:33:45 +02:00
Ivan Kravets
68e3f9dc00 Fix "Runtime Error: Dictionary size changed during iteration" // Resolve #2003 2019-01-09 16:34:55 +02:00
Ivan Kravets
fabaadec60 Fix an issue with incorrect detecting of compatibility (LDF) between generic library and Arduino or ARM mbed frameworks 2018-12-22 22:30:23 +02:00
Ivan Kravets
7f697961ec Sync docs 2018-12-21 17:26:13 +02:00
Ivan Kravets
9334f31ff2 Docs: Sync boards 2018-12-20 20:51:34 +02:00
Ivan Kravets
579de32d4e Docs: Remove examples with ESP8266 LD scripts, they can change 2018-12-14 18:38:41 +02:00
Ivan Kravets
c3702391ea Docs: Add RISC-V ASM Video Tutorial 2018-12-13 20:45:35 +02:00
Ivan Kravets
826418a443 Bump version to 3.6.4b1 2018-12-13 17:30:49 +02:00
Ivan Kravets
4dfa885a85 CLion: Improve project portability using "${CMAKE_CURRENT_LIST_DIR}" instead of full path 2018-12-13 17:30:10 +02:00
Ivan Kravets
9f4dde4b5e Use full path to PlatformIO CLI when generate project for IDE 2018-12-13 17:24:08 +02:00
Ivan Kravets
3748219cac Document system PATH for a custom VSCode task 2018-12-12 21:46:28 +02:00
Ivan Kravets
4b55767fb9 Docs: "Custom Build Task" for VSCode 2018-12-12 21:01:13 +02:00
Ivan Kravets
5aef182652 Merge tag 'v3.6.3' into develop
Bump version to 3.6.3
2018-12-12 16:19:26 +02:00
Ivan Kravets
6db47cec2b Merge branch 'release/v3.6.3' 2018-12-12 16:19:25 +02:00
Ivan Kravets
6f8b9d70bc Bump version to 3.6.3 2018-12-12 16:19:14 +02:00
Ivan Kravets
d8cbe99f2c Fix an issue with a broken headers list when generating ".clang_complete" for Emacs // Resolve #1960 2018-12-12 15:50:34 +02:00
Ivan Kravets
a690b8c085 Bump version to 3.6.3b2 2018-12-12 02:51:24 +02:00
Ivan Kravets
b874359482 Ignore *.asm and *.ASM files when building Arduino-based library (compatibility with Arduino builder) 2018-12-12 02:49:42 +02:00
Ivan Kravets
3a18e668c2 Docs: Better explanation about "PlatformIO IDE" 2018-12-12 01:55:54 +02:00
Ivan Kravets
3ca9527da4 Bump version to 3.6.3b1 2018-12-12 01:29:43 +02:00
Ivan Kravets
f539513376 Fixed spurious project's "Problems" for PlatformIO IDE for VSCode when ARM mbed framework is used 2018-12-12 01:28:37 +02:00
Ivan Kravets
afdfaeec68 Check if "_lockfile" attribute exists 2018-12-03 18:31:12 -08:00
Ivan Kravets
676c87d081 Allow to override platform "package_repositories" 2018-11-30 01:36:50 +02:00
Ivan Kravets
db3b0499c9 Merge branch 'release/v3.6.2' 2018-11-29 18:02:58 +02:00
Ivan Kravets
98032ec548 Merge tag 'v3.6.2' into develop
Bump version to 3.6.2
2018-11-29 18:02:58 +02:00
Ivan Kravets
8ef6ea8053 Bump version to 3.6.2 2018-11-29 18:02:49 +02:00
Ivan Kravets
d87ee0b286 Bump docs 2018-11-29 18:00:52 +02:00
Ivan Kravets
6f01f10f59 YAPF 2018-11-29 17:59:29 +02:00
Ivan Kravets
59a0d2b618 Bump version to 3.6.2rc2 2018-11-29 16:02:59 +02:00
Ivan Kravets
16df5474e4 VSCode IntelliSense config: Typo fix with useless bracket 2018-11-29 15:59:22 +02:00
Ivan Kravets
33ea6ef123 Be silent when the debug interpreter is run 2018-11-29 15:21:06 +02:00
Ivan Kravets
a485e563f0 Bump version to 3.6.2rc1 2018-11-29 00:52:36 +02:00
Ivan Kravets
cf35f9dbf8 Only patch versions are allowed for "contrib-pysite" 2018-11-28 21:32:37 +02:00
Ivan Kravets
710b150fcd Switch PIO Home to native WebSockets (next step to PY3) 2018-11-28 17:28:14 +02:00
Ivan Kravets
13731b4461 Switch PIO Home to native WebSockets (next step to PY3) 2018-11-28 17:23:33 +02:00
Ivan Kravets
3d52710935 Bump version to 3.6.2b6 2018-11-27 00:55:30 +02:00
Ivan Kravets
d475f44e49 Escape string when generating manifest for VSCode C/C++ IntelliSense service 2018-11-27 00:54:59 +02:00
Ivan Kravets
7574798a3a Document "erase" target 2018-11-24 15:51:50 +02:00
Ivan Kravets
9ef8d4cfe0 Docs: Grammar fixes 2018-11-24 14:30:21 +02:00
Ivan Kravets
b42d0efa73 Bump version to 3.6.2b5 2018-11-21 15:00:52 +02:00
Ivan Kravets
4a17a9b5b3 Improved IntelliSense for PlatformIO IDE for VSCode via passing extra compiler information for C/C++ Code Parser 2018-11-21 15:00:13 +02:00
Ivan Kravets
d3909bdfa2 Bump version to 3.6.2b4 2018-11-21 00:52:54 +02:00
Ivan Kravets
a2b0b2893b LDF: Stop handling "define" and "undef" when condition fails; handle CPP files in "chain+" and "deep+" modes // Resolve #1930 2018-11-21 00:52:34 +02:00
Ivan Kravets
9d2499ab98 Bump version to 3.6.2b3 2018-11-20 01:23:55 +02:00
Ivan Kravets
579a973512 Handle CWD when searching for a file // Resolve #1930 2018-11-20 01:23:34 +02:00
Ivan Kravets
b861e9c192 Document in library.json how to pass flags to a global build environment 2018-11-20 00:50:52 +02:00
Ivan Kravets
375006ee65 Bump version to 3.6.2b2 2018-11-19 22:30:52 +02:00
Ivan Kravets
23af9c9027 Fix an issue when Library Dependency Finder (LDF) finds spurious dependencies in `chain+` and `deep+` modes // Resolve #1930 2018-11-19 22:29:53 +02:00
Ivan Kravets
7322df26ad Fix an issue when Library Dependency Finder (LDF) does not handle project src_filter // Resolve #1905 2018-11-19 19:06:56 +02:00
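For reference, a hedged sketch of a project-level `src_filter` that the LDF now takes into account (environment name and paths are placeholders, not from this changeset):

    [env:uno]
    ; build everything under src/ except an experimental subfolder
    src_filter = +<*> -<experimental/>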
Ivan Kravets
32bb9c9d83 Bump version to 3.6.2b1 2018-11-19 17:46:19 +02:00
Ivan Kravets
b22ca10f8c Prepend CPPPATH of library dependencies instead of appending // Resolve #1914 2018-11-19 17:45:53 +02:00
Ivan Kravets
95beb03aad Bump version to 3.6.2a3 2018-11-18 23:54:42 +02:00
Ivan Kravets
f65ab58c88 Go over 8010-8100 TCP ports when shutting down PIO Home server 2018-11-18 23:53:47 +02:00
Ivan Kravets
c06a018d88 Docs: Add support for OLIMEX ESP32-PRO 2018-11-05 18:32:58 +02:00
Ivan Kravets
7789e3bc62 Rename "System" to "Hardware" for board spec 2018-11-04 17:24:39 +02:00
Ivan Kravets
1287e51bf8 Add info about "EN" pin for ESP32 and debug probes 2018-11-04 13:13:34 +02:00
Ivan Kravets
151823f80e Fix pinouts for oddWires IOT-Bus JTAG 2018-11-02 15:48:12 +02:00
Ivan Kravets
09d58d0d49 Update docs for ESP8266 lwIP profiles 2018-11-02 12:54:09 +02:00
Ivan Kravets
0a6fb68840 Bump version to 3.6.2a2 2018-10-30 06:40:36 +02:00
Ivan Kravets
38fb5b2234 Typo fix 2018-10-30 06:40:11 +02:00
Ivan Kravets
ab6a323aca Fixed an issue with VSCode IntelliSense warning about missing headers located in the "include" folder 2018-10-30 00:27:29 +02:00
Ivan Kravets
50ed828e7a Bump version to 3.6.2a1 2018-10-30 00:14:58 +02:00
Ivan Kravets
692af90161 Fix incorrect wording when initializing/updating project 2018-10-30 00:14:06 +02:00
Ivan Kravets
543a1dddae Sync docs 2018-10-29 22:38:20 +02:00
Ivan Kravets
fce84b5a48 Add debugging support using TIAO USB Multi-Protocol adapter (TUMPA) 2018-10-29 22:27:47 +02:00
Ivan Kravets
67a6f66a35 Add support for oddWires IoT-Bus Io debug tool 2018-10-29 19:23:06 +02:00
Ivan Kravets
fdbebb178c Merge tag 'v3.6.1' into develop
Bump version to 3.6.1
2018-10-29 14:10:57 +02:00
Ivan Kravets
0747fe9dea Merge branch 'release/v3.6.1' 2018-10-29 14:10:56 +02:00
Ivan Kravets
331cd0aa0d Bump version to 3.6.1 2018-10-29 14:10:42 +02:00
Ivan Kravets
8b74b12990 Don't recreate git ignore and travis configs when project is already inited 2018-10-29 14:02:29 +02:00
Ivan Kravets
7a0c1e13f3 Bump version to 3.6.1rc7 2018-10-28 00:39:42 +03:00
Ivan Kravets
e94d758131 Use "items" instead of "iteritems" (PY2/3) // Issue #895 2018-10-27 20:51:55 +03:00
Ivan Kravets
080369f597 Make "print" compatible between Py2 & Py3 2018-10-27 20:22:11 +03:00
Ivan Kravets
729178731c Improve a loading speed of PIO Home "Recent News" 2018-10-27 20:07:07 +03:00
Ivan Kravets
5c278b54f7 Use "super" when calling parent class // Issue #895 2018-10-27 15:24:10 +03:00
Ivan Kravets
2007491be9 Don't override existing ".gitignore" file 2018-10-27 14:20:33 +03:00
Ivan Kravets
e96078b4e3 Exclude upcoming ".pio" from VCS 2018-10-27 14:18:47 +03:00
Ivan Kravets
118f22bed3 PyLint fix 2018-10-26 01:27:57 +03:00
Ivan Kravets
2134022565 Print board's configuration URL 2018-10-26 01:27:06 +03:00
Ivan Kravets
cf2a2395e5 Sync new Atmel AVR boards 2018-10-25 19:52:55 +03:00
Ivan Kravets
8947b63e41 Better formatting when asking to remove a file 2018-10-25 14:12:09 +03:00
Ivan Kravets
fc8bffdd81 Ask user to manually remove a file on exception 2018-10-25 14:03:52 +03:00
Ivan Kravets
75105e18ba Wait 1 second on Windows when PIO Home shuts down 2018-10-25 13:48:47 +03:00
Ivan Kravets
3507290a20 Shutdown PIO Home server before updating tool-pioplus; Update tool-pioplus to 1.4.11 2018-10-25 13:44:41 +03:00
Ivan Kravets
7cc4e8ce15 Bump version to 3.6.1rc6 2018-10-24 01:21:02 +03:00
Ivan Kravets
08dc5dec89 Revert "Cache loaded project config"
This reverts commit bfee896378.
2018-10-24 01:19:54 +03:00
Ivan Kravets
d92349c8f7 Add "reset" support for "memoized" 2018-10-24 01:19:39 +03:00
Ivan Kravets
92289d373b Add example with $PROJECT_HASH for Windows 2018-10-24 00:23:10 +03:00
Ivan Kravets
4b9e8f0ba4 Added $PROJECT_HASH template variable for build_dir 2018-10-23 22:55:26 +03:00
Ivan Kravets
bfee896378 Cache loaded project config 2018-10-23 22:27:18 +03:00
Ivan Kravets
e4c112608b Docs: Move "Custom target" to upper level 2018-10-23 19:21:25 +03:00
Ivan Kravets
04eb531ac2 Add more example with a custom target 2018-10-23 19:12:02 +03:00
Ivan Kravets
8e3020c0f8 Sync docs 2018-10-23 18:09:43 +03:00
Ivan Kravets
51acd02421 Bump version to 3.6.1rc5 2018-10-22 16:49:08 +03:00
Ivan Kravets
8a1b94b48c Process `build_unflags` for cloned environment when building a static library 2018-10-22 16:33:30 +03:00
Ivan Kravets
e11013189b Docs: Move library manager CLI to userguide 2018-10-19 22:07:00 +03:00
Ivan Kravets
98deefc4f5 Bump version to 3.6.1rc4 2018-10-19 17:48:00 +03:00
Ivan Kravets
058a5e854d Skip aceinna_imu from linux builds 2018-10-19 16:50:51 +03:00
Ivan Kravets
7b998c8cda Fix an issue with incorrect handling of a custom package name 2018-10-19 16:37:15 +03:00
Ivan Kravets
98a1fd79b6 Revert back "Handle first part for package name" 2018-10-19 16:13:55 +03:00
Ivan Kravets
e344194f86 Handle first part for package name 2018-10-19 15:58:43 +03:00
Ivan Kravets
05b656e6b0 Update README for "include", "lib", and "test" directories 2018-10-17 21:56:27 +03:00
Ivan Kravets
9c30472777 Generate "test" directory per project 2018-10-17 21:16:09 +03:00
Ivan Kravets
016caa731d Rename "readme.txt" to README for "include" and "lib" project folders; don't create these folders if they were deleted before 2018-10-17 19:58:38 +03:00
Ivan Kravets
5b0befef45 Drop support for Freescale Kinetis FRDM-KL26Z, include Aceinna in platforms list 2018-10-17 14:48:53 +03:00
Ivan Kravets
4b588a589d Add Aceinna dev/platform 2018-10-17 14:11:54 +03:00
Ivan Kravets
1598b0632a Sync docs 2018-10-17 14:03:19 +03:00
Ivan Kravets
a32c67a0ce Bump version to 3.6.1rc3 2018-10-17 01:47:08 +03:00
Ivan Kravets
1183105557 Revert back a clang includes list without quotes for Atom 2018-10-17 01:46:16 +03:00
Ivan Kravets
d1e4f22e7f Add docs for JTAG and SWD connectors 2018-10-16 23:01:40 +03:00
Ivan Kravets
8a5b3a90cb Bump version to 3.6.1rc2 2018-10-13 19:32:54 +03:00
Ivan Kravets
2b53ecb111 Improve PIO Unified Debugger for "mbed" framework and fix issue with missed local variables 2018-10-13 19:32:31 +03:00
Ivan Kravets
0159b1cf7f Fixed an issue with broken includes when generating `.clang_complete` and space is used in path // Issue #1873 2018-10-12 23:04:12 +03:00
José Antonio de la Torre
d9dd83e327 Solved issues with vim whitespace in paths (#1873)
When a library has whitespace in its name, the path
will contain whitespace too. When vim tries to decode
the path, it will fail.

This fix wraps each path in quotes.
2018-10-12 22:41:13 +03:00
Ivan Kravets
05fe52bda9 Bump version to 3.6.1rc1 2018-10-12 22:31:11 +03:00
Ivan Kravets
6294580e25 Show a valid error when Internet is off-line while initializing a new project // Resolve #1784 2018-10-12 22:30:28 +03:00
Ivan Kravets
69d01c4bc1 Fix an issue when `pio run -t monitor` always uses first `monitor_port` even with multiple environments // Resolve #1841 2018-10-12 21:57:57 +03:00
Ivan Kravets
d4e553fb5a Generate an "include" directory with a README file when initializing a new project 2018-10-12 21:49:02 +03:00
Ivan Kravets
ff8fefb797 Report about outdated 99-platformio-udev.rules // Resolve #1823 2018-10-12 19:35:58 +03:00
Ivan Kravets
b77fb79cd6 Sync docs 2018-10-12 16:02:35 +03:00
Ivan Kravets
00b173f13f Fix an issue when dynamic build flags were not handled correctly // Resolve #1799 2018-10-12 15:09:54 +03:00
Ivan Kravets
13ff30788e Sync docs 2018-10-12 15:06:09 +03:00
Ivan Kravets
842db2643d Docs: Typos in VSCode Watchpoints 2018-10-10 16:20:28 +03:00
Ivan Kravets
aee0c7b9c2 Docs: Fix invalid COMPONENT_EMBED_TXTFILES macro for ESP32 2018-10-10 12:58:16 +03:00
Ivan Kravets
f67cc1770d Document "Watchpoints" for VSCode 2018-10-10 02:19:03 +03:00
Ivan Kravets
159cd7c073 Better explanation about overriding settings for board 2018-10-05 01:25:33 +03:00
Ivan Kravets
e83a11d02a More detailed info about debug per board 2018-10-05 00:36:23 +03:00
Ivan Kravets
ba2275fbba Test windows builds on x86 & x64 2018-10-04 20:20:16 +03:00
Ivan Kravets
59a3a7dd55 Minor tweak to docs 2018-10-04 01:51:21 +03:00
Ivan Kravets
0a7d6fb814 Revert back initial white-space for docs tables 2018-10-04 01:46:30 +03:00
Ivan Kravets
94bf067639 Refactor docs for boards 2018-10-04 01:33:15 +03:00
Ivan Kravets
4cd13b9d47 Bump version to 3.6.1a5 2018-10-02 00:12:29 +03:00
Ivan Kravets
34325dbc4c Support in-line comments for multi-line value in platformio.ini 2018-10-02 00:11:41 +03:00
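A small illustrative snippet of the newly supported syntax (environment name, flag names, and values are placeholders):

    [env:uno]
    build_flags =
        -D VERSION=3        ; in-line comment after a multi-line value
        -D DEBUG_LEVEL=1    ; also stripped before the flags reach the compiler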
Ivan Kravets
ec9fbca181 Docs: tutorial for ESP32 2018-09-30 23:32:53 +03:00
Ivan Kravets
e9f2334e59 Fix lib test 2018-09-26 15:25:38 +03:00
Ivan Kravets
c10b8633ab Sync docs 2018-09-23 01:47:35 +03:00
Ivan Kravets
18a8b05214 Rename "fixed" to "detached" for LDF 2018-09-21 19:23:08 +03:00
Ivan Kravets
22ceae0149 Do not re-create ".gitignore" and ".travis.yml" files if they were removed from a project 2018-09-20 14:57:42 +03:00
Ivan Kravets
e6fa8654ad YAPF 2018-09-20 14:55:55 +03:00
Ivan Kravets
24f97ef768 Introduce RISC-V GAP dev/platform 2018-09-20 13:56:42 +03:00
Ivan Kravets
f0a91df2cf Fix incorrect activation commands for PIO Core Installation (Virtual Environment) 2018-09-19 16:12:57 +03:00
Ivan Kravets
a3e3c30d0d Docs: Add "Configuration" group to Library Manager 2018-09-19 16:03:43 +03:00
Ivan Kravets
421694ce0c Sync docs 2018-09-10 22:37:25 +03:00
Ivan Kravets
3c4d978c1c Document URL Handlers for device port monitor // Resolve #1838 2018-09-10 21:20:28 +03:00
Ivan Kravets
e5fc18fddb Sync docs 2018-09-06 22:50:59 +03:00
Ivan Kravets
535048c420 Bump version to 3.6.1a4 2018-09-06 22:26:43 +03:00
Ivan Kravets
b7ac59066f Revert back "PIO Debug (skip Pre-Debug)" debug configuration for VSCode 2018-09-06 22:21:46 +03:00
Ivan Kravets
4b2a63db1f Bump version to 3.6.1a3 2018-09-06 16:35:03 +03:00
Ivan Kravets
a477e8cb23 Default VSCode Debug configuration without Pre-Debug 2018-09-06 16:34:27 +03:00
Ivan Kravets
7108b2fdd4 Introduce "Release" and "Debug" Build Configurations 2018-09-06 14:42:37 +03:00
Ivan Kravets
e6e629d2c5 Bump version to 3.6.1a2 2018-09-06 02:26:14 +03:00
Ivan Kravets
f54d32843a Add "debug" target, update docs for "uploads" option // Resolve #1833 2018-09-06 02:25:28 +03:00
Ivan Kravets
ce47b6f69f Docs: update tutorials 2018-09-06 01:26:08 +03:00
Ivan Kravets
4f0c60edfa Clean cache on PIO Core update 2018-09-02 19:32:45 +03:00
David Hasenfratz
6caa7f30ac Fix typos (#1819) 2018-08-30 20:22:36 +03:00
Ivan Kravets
b43f243f6a Sync docs 2018-08-28 22:08:28 +03:00
Ivan Kravets
abbe30ef97 Docs: Add info about drivers to the tutorial requirements // Resolve #1802 2018-08-27 19:21:12 +03:00
Ivan Kravets
8d1ff91af1 Bump version to 3.6.1a1 2018-08-27 18:56:06 +03:00
Ivan Kravets
78c383eb68 Use Pre-Debug task by its VSCode definition 2018-08-27 18:55:08 +03:00
Ivan Kravets
476a878733 Skip Intel MCS-51 tests for Linux 2018-08-20 20:54:07 +03:00
Ivan Kravets
d109e4756d Initial support for Intel MCS-51 (8051) 2018-08-20 19:37:34 +03:00
Ivan Kravets
d448a0ec5c Switch docs to HTTPS 2018-08-15 19:44:02 +03:00
Ivan Kravets
d009b997bc Auto-dropdown navigation; move "Docs" to the right side 2018-08-15 19:12:41 +03:00
Ivan Kravets
9258763491 Correct docs for SemVer/Deps syntax 2018-08-15 14:38:19 +03:00
Ivan Kravets
79e6df7263 Temporary hook for ReadTheDocs #2971 with a broken "edit" link 2018-08-14 14:36:03 +03:00
Ivan Kravets
4ff013c0fe Improve docs for advanced scripting 2018-08-14 14:24:03 +03:00
Ivan Kravets
71cdc9fe78 Docs: Add compatible platforms, frameworks, and boards per debug tool 2018-08-11 15:34:33 +03:00
Ivan Kravets
e3d17d132a Docs: Improve "Drivers" section for debugging tools 2018-08-10 14:33:36 +03:00
Ivan Kravets
70eedfbeec Merge branch 'release/v3.6.0' 2018-08-06 18:46:23 +03:00
Ivan Kravets
c3d598f488 Merge tag 'v3.6.0' into develop
Bump version to 3.6.0 (issues #1594 #1412 #1462 #1735)
2018-08-06 18:46:23 +03:00
Ivan Kravets
6d5dc60b47 Bump version to 3.6.0 (issues #1594 #1412 #1462 #1735) 2018-08-06 18:43:58 +03:00
Ivan Kravets
129146e82e Bump version to 3.6.0rc1 2018-08-03 21:44:41 +03:00
Ivan Kravets
df923bf17e Add package "lib" folder to LD_PATH 2018-08-03 21:43:40 +03:00
Ivan Kravets
8f19dd50fe Docs: Fix "build_flags" examples with a macro where special chars are used 2018-08-02 17:57:57 +03:00
Ivan Kravets
ab1d1f248c Refactor docs for PIO Unified debug tools 2018-07-30 19:39:31 +03:00
Ivan Kravets
617f51b9ea Improve docs for PIO Unified Debugger 2018-07-30 18:13:04 +03:00
Ivan Kravets
41432d4075 Merge branch 'hotfix/v3.5.5' into develop
* hotfix/v3.5.5:
  Fixed an issue with PIO Remote when upload process depends on the source code of a project framework
2018-07-30 14:26:35 +03:00
Ivan Kravets
ae964fa729 Fixed an issue with PIO Remote when upload process depends on the source code of a project framework 2018-07-30 14:26:24 +03:00
Ivan Kravets
9a5f9843b9 Bump version to 3.6.0a11 2018-07-28 12:51:52 +03:00
Ivan Kravets
44175f87b1 Merge branch 'hotfix/v3.5.5' into develop
* hotfix/v3.5.5:
  Fix broken unit testing when mbed framework is used
2018-07-28 12:49:33 +03:00
Ivan Kravets
2f2cfc2d84 Fix broken unit testing when mbed framework is used 2018-07-28 12:49:22 +03:00
Ivan Kravets
d9e908fceb Test 3 random examples from each dev/platform 2018-07-27 15:45:51 +03:00
Ivan Kravets
98aa47c885 Drop support for codecov.io 2018-07-27 15:38:46 +03:00
Ivan Kravets
c777b0095d Merge branch 'hotfix/v3.5.5' into develop
* hotfix/v3.5.5:
  Fix an issue when "srcFilter" field in "library.json" breaks a library build // Resolve #1735
  Fix "test_lib" test
2018-07-27 01:27:17 +03:00
Ivan Kravets
9191ea97fe Fix an issue when "srcFilter" field in "library.json" breaks a library build // Resolve #1735 2018-07-27 01:27:05 +03:00
Ivan Kravets
fedf3162f1 Fix "test_lib" test 2018-07-27 00:48:13 +03:00
Ivan Kravets
89fc77d87a Docs: Add Community link in TOP menu 2018-07-26 20:46:43 +03:00
Ivan Kravets
2004c9b079 Fix "test_lib" test 2018-07-26 20:46:21 +03:00
Ivan Kravets
5aaa9cf205 Fix "Home: Internal Store Exception" // Resolve #1756 2018-07-25 22:25:47 +03:00
Ivan Kravets
9b15ec417b Document solution for "ImportError: cannot import name _remove_dead_weakref" 2018-07-25 15:58:47 +03:00
Ivan Kravets
42540d4207 Docs: Fix broken example for "Dynamic build flags" 2018-07-23 22:49:25 +03:00
Ivan Kravets
72bfa4a1e2 Bump version to 3.6.0a10 2018-07-20 13:35:52 +03:00
Ivan Kravets
7b8342cd9a Merge branch 'hotfix/v3.5.5' into develop
* hotfix/v3.5.5:
  Typo fix
2018-07-20 13:35:22 +03:00
Ivan Kravets
a206b2e4fd Typo fix 2018-07-20 13:35:06 +03:00
Ivan Kravets
6e8ce56206 Disable requirements status temporarily 2018-07-20 02:10:50 +03:00
Ivan Kravets
7c2c0ba1aa Bump version to 3.6.0a9 2018-07-20 02:09:40 +03:00
Ivan Kravets
f130b5bfb6 Disable requirements status temporarily 2018-07-20 02:07:07 +03:00
Ivan Kravets
bf23d85005 Merge branch 'hotfix/v3.5.5' into develop
* hotfix/v3.5.5:
  Add "test_build_project_src" option for PIO Unit Testing
2018-07-20 02:05:01 +03:00
Ivan Kravets
903b41b336 Add "test_build_project_src" option for PIO Unit Testing 2018-07-20 02:04:40 +03:00
Ivan Kravets
ab24ca4ff6 Cleanup 2018-07-20 00:33:24 +03:00
Ivan Kravets
fd8b603910 Update README 2018-07-19 19:00:09 +03:00
Ivan Kravets
e4462d7546 Add Infineon XMC dev/platform // Resolve #471 2018-07-19 16:19:10 +03:00
Ivan Kravets
aa796959c9 Merge branch 'hotfix/v3.5.5' into develop
* hotfix/v3.5.5:
  Fix PIO Unit Testing issue when ``UNIT_TEST`` macro was not set in a build environment
  Append __PLATFORMIO_DEBUG__ macro in debug session
  Add udev rules for J-Link devices
  Disable exec command for Sublime Text debugger

# Conflicts:
#	HISTORY.rst
2018-07-19 15:28:58 +03:00
Ivan Kravets
ff3ce2d69e Fix PIO Unit Testing issue when `UNIT_TEST` macro was not set in a build environment 2018-07-19 15:27:00 +03:00
Ivan Kravets
ff59dcefe0 Append __PLATFORMIO_DEBUG__ macro in debug session 2018-07-19 01:44:12 +03:00
Ivan Kravets
3f2f79ade4 Add udev rules for J-Link devices 2018-07-19 01:43:31 +03:00
Ivan Kravets
bc380714bd Disable exec command for Sublime Text debugger 2018-07-19 01:43:15 +03:00
Ivan Kravets
2ba41cddc4 Disable exec command for Sublime Text debugger 2018-07-17 20:58:20 +03:00
Ivan Kravets
4a14cc686c Add udev rules for J-Link devices 2018-07-17 20:57:41 +03:00
Ivan Kravets
734cb5c7aa Bump version to 3.6.0a8 2018-07-17 16:24:38 +03:00
Ivan Kravets
da89f57046 PIO Home 1.0.0; PIO Plus 1.4.0 2018-07-17 16:24:02 +03:00
Ivan Kravets
4a3b616b0f Sync docs 2018-07-16 17:45:52 +03:00
Ivan Kravets
a14f2d291e Improve checking of package structure after unpacking // Issue #1462 2018-07-15 01:06:59 +03:00
Ivan Kravets
72d260c295 Fix file locking of package installer // Issue #1594 2018-07-15 00:43:12 +03:00
Ivan Kravets
e1578dabac Lock interprocess requests to PlatformIO Package Manager for install/uninstall operations // Resolve #1462 2018-07-14 22:10:56 +03:00
Ivan Kravets
f2c4ba1895 Check item after unpacking only if not symbolic link 2018-07-13 13:10:24 +03:00
Ivan Kravets
695a850979 Bump version to 3.6.0a7 2018-07-13 12:16:44 +03:00
Ivan Kravets
1d7d518ec5 Temporarily disable checking of extracted item from archive 2018-07-13 12:15:07 +03:00
Ivan Kravets
44a926b30a Check package structure after unpacking and raise error when antivirus tool blocks PlatformIO package manager // Resolve #1462 2018-07-13 01:54:37 +03:00
Ivan Kravets
735cfbf850 Fix "Cannot uninstall 'pyparsing'" for macOS CI build 2018-07-13 00:25:50 +03:00
Ivan Kravets
6b6c60e82c Fix "Cannot uninstall 'pyparsing'" for macOS CI build 2018-07-13 00:04:16 +03:00
Ivan Kravets
cb7717eaf6 Update docs for creating a custom dev/platform 2018-07-12 20:52:25 +03:00
Ivan Kravets
28a76eb389 Update ISSUE_TEMPLATE.md 2018-07-12 01:26:02 +03:00
Ivan Kravets
0fdfb273c6 Update ISSUE_TEMPLATE.md 2018-07-12 01:23:59 +03:00
Ivan Kravets
eced1c4c2a Update ISSUE_TEMPLATE.md 2018-07-12 01:23:24 +03:00
Ivan Kravets
7d6192b069 Update ISSUE_TEMPLATE.md 2018-07-12 01:22:18 +03:00
Ivan Kravets
23f0ffdfeb Append __PLATFORMIO_DEBUG__ macro in debug session 2018-07-10 15:54:46 +03:00
Ivan Kravets
f7ac71d48e Docs: `projenv` is available only for POST-type scripts 2018-07-10 00:59:01 +03:00
Ivan Kravets
9af715e872 Improve docs for Advanced Scripting section 2018-07-08 18:58:04 +03:00
Ivan Kravets
dae50a32c0 Docs: Info about connection Vbus [CN3-1] to Vcc [CN3-3] of FT2232H Mini-Module 2018-07-07 20:34:34 +03:00
Ivan Kravets
ca37190da4 Update docs for PIO Unified Debugger; add wiring connections and debug probes pictures 2018-07-07 01:35:52 +03:00
Ivan Kravets
7c5c5b5f70 Bump version to 3.6.0a6 2018-07-06 15:26:45 +03:00
Ivan Kravets
83ccf96f36 Merge branch 'hotfix/v3.5.5' into develop
* hotfix/v3.5.5:
  Export extra flash images for IDE
  Update core dependencies
2018-07-06 15:26:01 +03:00
Ivan Kravets
c1f4b729ea Export extra flash images for IDE 2018-07-06 15:25:51 +03:00
Ivan Kravets
1c8ac97073 Update core dependencies 2018-07-06 15:25:26 +03:00
Ivan Kravets
fd88a249b4 Update core dependencies 2018-07-06 15:23:23 +03:00
Ivan Kravets
d8329a6868 ThingForward: First steps with PlatformIO’s Unified Debugger 2018-07-05 18:53:47 +03:00
Ivan Kravets
5c48233259 Docs: Install PIO Core into Virtual Environment 2018-07-05 17:49:27 +03:00
Ivan Kravets
5efe0e4f8c Merge branch 'hotfix/v3.5.5' into develop
* hotfix/v3.5.5:
  Shorten a name for VSCode debug launch configurations
2018-07-04 00:06:54 +03:00
Ivan Kravets
8f88939aa0 Shorten a name for VSCode debug launch configurations 2018-07-04 00:06:44 +03:00
Ivan Kravets
a58535d95c Bump version to 3.6.0a5 2018-07-03 15:15:44 +03:00
Ivan Kravets
a4173f5de1 Merge tag 'v3.5.4' into develop
Bump version to 3.5.4

# Conflicts:
#	HISTORY.rst
#	platformio/__init__.py
2018-07-03 15:14:28 +03:00
Ivan Kravets
dc3973b046 Merge branch 'hotfix/v3.5.4' 2018-07-03 15:11:32 +03:00
Ivan Kravets
7a5af4b180 Bump version to 3.5.4 (issues #1712, #1705, #1023, #1254, #1658, #1054, #1683, #1343, #1665) 2018-07-03 15:10:39 +03:00
Ivan Kravets
25b562e1c1 Added workaround for Python SemVer package's issue 61 with caret range and pre-releases 2018-07-03 14:55:48 +03:00
Ivan Kravets
6dada01e70 Disable macOS frameworks for VSCode IntelliSense 2018-07-03 14:17:38 +03:00
Ivan Kravets
3956dae01e Sync docs 2018-07-02 18:02:00 +03:00
Ivan Kravets
19711d75e0 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Typo fix
2018-06-30 21:05:40 +03:00
Ivan Kravets
5b1b05cd09 Typo fix 2018-06-30 21:05:20 +03:00
Ivan Kravets
3e0feeabb4 Bump version to 3.6.0a4 2018-06-30 19:48:30 +03:00
Ivan Kravets
e21ac05e71 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Improve removing of default build flags using `build_unflags` option // Resolve #1712
  Export ``LIBS``, ``LIBPATH``, and ``LINKFLAGS`` data from project dependent libraries to the global build environment
  Replace "env" pattern by "sysenv" in "platformio.ini" // Resolve #1705
2018-06-30 19:36:02 +03:00
Ivan Kravets
4adc73ebe2 Improve removing of default build flags using build_unflags option // Resolve #1712 2018-06-30 19:34:24 +03:00
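As a rough illustration of the option this commit improves (the flags shown are placeholders, not the defaults of any particular platform):

    [env:uno]
    build_flags = -DNDEBUG
    ; remove flags that the platform or framework injects by default
    build_unflags = -Os -std=gnu++11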
Ivan Kravets
357e70e5bb Export `LIBS`, `LIBPATH`, and `LINKFLAGS` data from project dependent libraries to the global build environment 2018-06-30 18:24:50 +03:00
Ivan Kravets
ca3567df1e Replace "env" pattern by "sysenv" in "platformio.ini" // Resolve #1705 2018-06-29 19:55:29 +03:00
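A minimal sketch of the resulting syntax, assuming a process environment variable named API_TOKEN is set in the shell (the variable name and flag are placeholders):

    [env:uno]
    ; expand a process environment variable via the "sysenv" scope
    build_flags = -DAPI_TOKEN=${sysenv.API_TOKEN}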
Ivan Kravets
9bd033e288 Update ESP32 boards flash size // Resolve #30 2018-06-29 19:29:32 +03:00
Ivan Kravets
7564e00fc4 Introduce Premium Support 2018-06-29 17:01:13 +03:00
Ivan Kravets
f1a8add795 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Switch to PIO Plus support
2018-06-28 23:57:38 +03:00
Ivan Kravets
d6ca30a920 Switch to PIO Plus support 2018-06-28 23:57:16 +03:00
Ivan Kravets
c8f6907d02 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Export PIOCOREPYSITEDIR to system environment
2018-06-26 01:34:37 +03:00
Ivan Kravets
369868624e Export PIOCOREPYSITEDIR to system environment 2018-06-26 01:34:26 +03:00
Ivan Kravets
dfecc04901 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Export PIOPYSITEDIR to system environment
2018-06-26 01:33:30 +03:00
Ivan Kravets
e9fe2856ec Export PIOPYSITEDIR to system environment 2018-06-26 01:33:12 +03:00
Ivan Kravets
0ba9b341cd Add new Olimex debug tools for ESP32: ARM-USB-OCD and ARM-USB-TINY 2018-06-26 01:01:37 +03:00
Ivan Kravets
9cff2d3206 Sync docs 2018-06-25 15:52:57 +03:00
Ivan Kravets
ab8497e7ce [VIDEO OVERVIEW] ThingForward - Intro to PIO Unified Debugger using ARM mbed OS and PlatformIO IDE for VSCode 2018-06-25 15:47:01 +03:00
Ivan Kravets
f0cd122952 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Fix broken history
2018-06-25 15:46:32 +03:00
Ivan Kravets
b83acf4297 Fix broken history 2018-06-25 15:46:21 +03:00
Ivan Kravets
89d403879e Docs: Reorder TOP menu 2018-06-23 19:32:58 +03:00
Ivan Kravets
b7ad64226e Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Fix issue when "platformio lib uninstall" removes initial source code // Resolve #1023
2018-06-21 21:34:07 +03:00
Ivan Kravets
2725d8da8b Fix issue when "platformio lib uninstall" removes initial source code // Resolve #1023 2018-06-21 21:33:56 +03:00
Vladimir Dronnikov
08759700b6 Add udev rule for Maple Leaf board (#1699) 2018-06-21 13:45:43 +03:00
Ivan Kravets
7cac351d25 Bump version to 3.6.0a3 2018-06-20 16:33:13 +03:00
Ivan Kravets
f62bde0e38 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Handle ConfigParser errors
2018-06-20 16:32:48 +03:00
Ivan Kravets
a9577bc0ba Handle ConfigParser errors 2018-06-20 16:32:38 +03:00
Ivan Kravets
ee69c13b2d Handle ConfigParser errors 2018-06-20 16:31:03 +03:00
Ivan Kravets
3c6f57ac5c Custom tasks for VScode 2018-06-20 15:48:14 +03:00
Ivan Kravets
4d48c365f5 Autogenerate examples for platforms and frameworks 2018-06-16 21:11:31 +03:00
Ivan Kravets
a3cda59d70 Refactor docs menu 2018-06-16 18:41:46 +03:00
Ivan Kravets
22b5e4e5c0 Remove examples with caret range for SemVer (issue with dependent Python package) 2018-06-15 17:53:04 +03:00
Ivan Kravets
19844c89c1 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Fix preprocessor for Arduino sketch when function returns certain type // Resolve #1683
2018-06-15 15:55:30 +03:00
Ivan Kravets
c055ed4850 Fix preprocessor for Arduino sketch when function returns certain type // Resolve #1683 2018-06-15 15:55:17 +03:00
Ivan Kravets
6f905e319f Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Removed "date&time" when processing project with "platformio run" command // Resolve #1343
  Improve documentation for advanced scripting
2018-06-15 15:03:15 +03:00
Ivan Kravets
389783adae Removed "date&time" when processing project with "platformio run" command // Resolve #1343 2018-06-15 14:53:48 +03:00
Ivan Kravets
46a62de14c Improve documentation for advanced scripting 2018-06-15 14:33:05 +03:00
Ivan Kravets
38c74b3f78 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Handle "architectures" data from "library.properties" manifest in `lib_compat_mode = strict`
2018-06-12 13:47:29 +03:00
Ivan Kravets
92fc308590 Handle "architectures" data from "library.properties" manifest in lib_compat_mode = strict 2018-06-12 13:47:16 +03:00
Ivan Kravets
0f9c213796 Bump version to 3.6.0a2 2018-06-11 22:55:28 +03:00
Ivan Kravets
a6831d9783 Merge branch 'hotfix/v3.5.4' into develop
* hotfix/v3.5.4:
  Isolate build environment for "BuildSources" nodes
  Append a main LD script at the beginning
  Bump version to 3.5.4a1
  Fixed issue with invalid LD script if path contains space
  Don't export ``CPPPATH`` of project dependent libraries to frameworks // Resolve #1665
  YAPF

# Conflicts:
#	HISTORY.rst
#	platformio/__init__.py
#	platformio/builder/tools/pioupload.py
2018-06-11 18:41:39 +03:00
Ivan Kravets
2ba7c47603 Temporarily disable raising an error when program data size exceeds the size declared in the board manifest 2018-06-10 01:56:48 +03:00
Ivan Kravets
786d505ecb Isolate build environment for "BuildSources" nodes 2018-06-10 01:27:50 +03:00
Ivan Kravets
00c0eaed8a Append a main LD script at the beginning 2018-06-09 14:50:54 +03:00
Ivan Kravets
46c904e67d Bump version to 3.5.4a1 2018-06-09 01:37:38 +03:00
Ivan Kravets
f9fde5d627 Fixed issue with invalid LD script if path contains space 2018-06-09 00:48:42 +03:00
Ivan Kravets
75754a4750 Don't export `CPPPATH` of project dependent libraries to frameworks // Resolve #1665 2018-06-08 21:38:41 +03:00
Ivan Kravets
a584ac1da2 YAPF 2018-06-08 21:37:57 +03:00
Ivan Kravets
2ff88837ec Sync docs 2018-06-08 18:37:05 +03:00
Ivan Kravets
4528ca0365 Bump version to 3.6.0a1 2018-06-04 16:38:53 +03:00
Ivan Kravets
bfc94d36e3 Introduce "Program Memory Usage" 2018-06-04 14:09:48 +03:00
Ivan Kravets
2fb8128791 YAPF 2018-06-02 16:14:58 +03:00
Ivan Kravets
20c1ce40d3 Check maximum allowed firmware size for uploading with "pio run --target checkprogsize" // Resolve #1412 2018-06-01 18:07:47 +03:00
Ivan Kravets
ecaa9d90b3 Continue work on PIO Core 3.5.4 2018-06-01 17:02:49 +03:00
Ivan Kravets
c0b069c920 Merge branch 'release/v3.5.3' 2018-06-01 17:01:46 +03:00
Ivan Kravets
551f0c1514 Merge tag 'v3.5.3' into develop
Bump version to 3.5.3
2018-06-01 17:01:46 +03:00
Ivan Kravets
7db04b1c3f Bump version to 3.5.3 (issue #1641 issue #1612 issue #1473 issue #1528 issue #1546 issue #1282 issue #1516 issue #1381 issue #1474) 2018-06-01 17:01:32 +03:00
Ivan Kravets
32dbf22d44 ESP32: Calculate an absolute path for a custom partitions table 2018-05-31 20:03:03 +03:00
Ivan Kravets
3243a84dba Fix incorrect handling of C/C++ standards passed via build_flags 2018-05-31 16:38:33 +03:00
Ivan Kravets
f465befa68 Rename "Standalone" to "Desktop" for IDE 2018-05-31 16:11:22 +03:00
Ivan Kravets
f4b4f5c434 Improve docs for lib_ignore option 2018-05-31 15:21:39 +03:00
Ivan Kravets
ffc94a88fe Sync docs 2018-05-31 14:31:28 +03:00
Ivan Kravets
fb29c9c0f6 Support old version of monitor_baud option for device monitor 2018-05-30 20:25:59 +03:00
Ivan Kravets
df437995df Enable C++ exceptions for ESP32 and ESP-IDF 2018-05-30 19:12:30 +03:00
Ivan Kravets
e4440ed94c Bump version to 3.5.3rc2 2018-05-30 14:16:46 +03:00
Ivan Kravets
b213a302e3 Handle _MCU and _F_CPU variables for AVR native // Resolve #1641 2018-05-30 14:14:42 +03:00
Ivan Kravets
d50dfe19d9 Bump version to 3.5.3rc1 2018-05-26 01:04:38 +03:00
Ivan Kravets
9ba5dc0a60 Override any option from board manifest in Project Configuration File "platformio.ini" // Resolve #1612 2018-05-26 01:02:52 +03:00
Ivan Kravets
5011c3e21c API to update BoardConfig manifest 2018-05-25 21:18:08 +03:00
Ivan Kravets
e48e15b014 Fix "memoized" helper when "expire" is not used 2018-05-25 21:13:47 +03:00
Ivan Kravets
357c932a88 Fix broken link 2018-05-25 19:06:23 +03:00
Ivan Kravets
2f07a58e4f Split stable/upstream docs for dev/platforms 2018-05-25 16:17:02 +03:00
Ivan Kravets
4a2594c12e Docs: revert back to 'sphinx' theme for Pygments 2018-05-25 15:31:31 +03:00
Ivan Kravets
8cda6db02d Rename LDF compatibility mode from "light" to "soft" 2018-05-25 01:46:53 +03:00
Ivan Kravets
f7053928f0 Better explanation how to switch between stable/upstream dev/platforms 2018-05-15 17:22:03 -07:00
Ivan Kravets
e22335984f Default debugging configuration for ESP-WROVER-KIT 2018-05-15 15:23:09 -07:00
Ivan Kravets
6a1a1956c8 Docs: add ft2232h as debug tool 2018-05-15 15:13:11 -07:00
Ivan Kravets
98852caefa Bump version to 3.5.3b5 2018-05-15 14:06:12 -07:00
Ivan Kravets
e399c6b363 Fix issue with monitor_speed option 2018-05-15 14:05:29 -07:00
Ivan Kravets
cb2c3b1b63 Bump version to 3.5.3b4 2018-05-14 22:14:10 -07:00
Ivan Kravets
19003ea51b Simplify configuration for PIO Unit Testing 2018-05-14 22:13:42 -07:00
Ivan Kravets
29064b6c63 Rename "monitor_baud" option to "monitor_speed" 2018-05-14 18:53:16 -07:00
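Illustrative usage of the renamed option (the value is an example; the older "monitor_baud" spelling is still accepted per the compatibility commit listed above):

    [env:uno]
    ; serial monitor baud rate, formerly "monitor_baud"
    monitor_speed = 115200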
Ivan Kravets
ba352454ed RISC-V dev/platform; Debug for ESP32 2018-05-11 20:02:33 +03:00
Ivan Kravets
5ee194b2b9 Document "Debug Level" for ESP32 2018-05-11 03:06:28 +03:00
Ivan Kravets
9d566d8905 Add debugger to env PATH 2018-05-11 03:06:18 +03:00
Ivan Kravets
b310c57136 Document the missing "--project-dir" and "--environment" options for pio device monitor 2018-05-09 15:35:06 +03:00
Ivan Kravets
9aa5f16b49 Update link to PIO Unified Debugger / Community 2018-05-08 21:31:55 +03:00
Ivan Kravets
4fac523811 Bump version to 3.5.3b3 2018-05-08 18:24:13 +03:00
Ivan Kravets
2bb22a86d7 New UI for PIO Unified Debugger and VSCode 2018-05-08 18:23:27 +03:00
Ivan Kravets
39aaae303f Use "debug_init_break" for Eclipse 2018-05-08 18:22:56 +03:00
Ivan Kravets
1310b7b07b Add OTA demo for ESP32 2018-05-05 23:51:59 +03:00
Ivan Kravets
18f6f23271 Fix default OTA port for ESP32 2018-05-05 21:30:18 +03:00
Ivan Kravets
366efacd81 Prepend upload flags instead of "append" 2018-05-05 21:15:50 +03:00
Ivan Kravets
7be1af4241 Do not check if a custom SVD file exists 2018-05-02 12:37:51 +03:00
Ivan Kravets
d0bc40bc24 Update history 2018-05-01 21:16:32 +03:00
Ivan Kravets
73b1d9ccd5 Configure a custom path to SVD file using "debug_svd_path" option 2018-05-01 21:09:32 +03:00
Ivan Kravets
94c27ae30f Update PIO Core deps 2018-05-01 00:58:36 +03:00
Ivan Kravets
b476e298d3 Bump version to 3.5.3b2 2018-04-30 16:50:23 +03:00
Ivan Kravets
c9fa2206ef Normalize SVD path 2018-04-30 12:33:19 +03:00
Ivan Kravets
b1caaa2208 Export path to SVD file for IDEs 2018-04-27 20:37:41 +03:00
Ivan Kravets
f46072f769 Generate beta configuration for the new PIO Debugger for VSCode 2018-04-27 18:01:08 +03:00
Ivan Kravets
7de4d6aeef PyLint fix 2018-04-27 12:57:11 +03:00
Ivan Kravets
31f14274af Ignore idedata event 2018-04-27 01:41:28 +03:00
Ivan Kravets
50c568c232 Fix "RuntimeError: maximum recursion depth exceeded" for library manager // Resolve #1528 2018-04-26 01:49:16 +03:00
Ivan Kravets
3bcc3e07ae Respect a custom "lib_dir" option in platformio.ini // Resolve #1473 2018-04-25 17:15:40 +03:00
Ivan Kravets
2ae169d210 Fix "RuntimeError: maximum recursion depth exceeded" for library manager // Resolve #1528 2018-04-25 16:57:43 +03:00
Ivan Kravets
1c68409a08 Improve support for Black Magic Probe in "uploader" role 2018-04-23 17:30:05 +03:00
Ivan Kravets
f981916f1d Docs: ESP8266 VTables 2018-04-21 01:00:55 +03:00
Ivan Kravets
0a9031e448 Switch to shutil.move instead of os.rename // Resolve #1584 2018-04-20 21:27:52 +03:00
Ivan Kravets
2d1daa756d Docs: Custom lwIP Variant and debug levels for ESP8266 2018-04-20 21:02:52 +03:00
Ivan Kravets
6b6860196a Fix issue with "build_unparse" for string items 2018-04-20 19:10:20 +03:00
Ivan Kravets
ccb63a9ecc Bump version to 3.5.3b1 2018-04-20 15:55:29 +03:00
Ivan Kravets
3ce62fbafe Escape compiler path for Win 2018-04-20 14:00:40 +03:00
Ivan Kravets
b77160d363 Upgrade VSCode CPP manifest to v3 2018-04-20 13:56:04 +03:00
Ivan Kravets
6a04f52620 Add info about "library.json" to README instruction 2018-04-15 06:28:02 +03:00
Ivan Kravets
aa28beddd8 Handle broken JSON files 2018-04-15 06:08:29 +03:00
Ivan Kravets
d0cc3a045e Handle unknown packages when do cleaning // Resolve #1282 2018-04-15 05:48:38 +03:00
Ivan Kravets
02efe4f7f3 Bump version to 3.5.3a9 2018-04-12 18:47:12 -07:00
Ivan Kravets
2c0ca3e437 Update PIO Home and PIO Core+ 2018-04-12 18:46:11 -07:00
Ivan Kravets
aa8de4ff4b Export "docs" from platform manifest 2018-04-12 18:44:38 -07:00
Ivan Kravets
59fe190f20 Better handling of VSCode Terminal IOError 2018-04-05 22:10:28 -07:00
Ivan Kravets
e0fc44aa42 Reinit download session when IOError 2018-04-05 13:08:23 -07:00
Ivan Kravets
e7b5a14e11 Temporary workaround for VSCode's "IOError: PackageManager" issue 2018-04-05 11:06:23 -07:00
Ivan Kravets
0710c094e7 Bump version to 3.5.3a8 2018-04-01 10:31:47 -07:00
Ivan Kravets
1410dd093a Revert back g++ macro from dump list // Issue #1546 2018-04-01 10:28:25 -07:00
Ivan Kravets
d1362c3751 Article: Building a Web Of Things REST-API on an Arduino MKR1000 with PlatformIO 2018-03-27 22:29:26 +03:00
Ivan Kravets
2299383b03 Docs: Update example "J-Link as debugger and uploader" 2018-03-24 15:31:08 +02:00
Ivan Kravets
622e4033c1 Dump g++ macros for IDE 2018-03-24 12:45:00 +02:00
Ivan Kravets
ec9a2b02ea Verify mDNS devices before dumping // Issue #1381 2018-03-23 17:51:45 +02:00
Ivan Kravets
275648a882 Check for non-ASCII chars mDNS service // Issue #1381 2018-03-23 13:54:33 +02:00
Ivan Kravets
5214b32ee3 Print request URL when package fails 2018-03-23 13:50:33 +02:00
Ivan Kravets
c1c2be0b58 Bump version to 3.5.3a7 2018-03-23 12:14:40 +02:00
Ivan Kravets
44fc500c93 Fix issue with incorrect API result 2018-03-23 12:13:59 +02:00
Ivan Kravets
b6d2e1b243 Bump version to 3.5.3a6 2018-03-23 00:09:05 +02:00
Ivan Kravets
d54327f1a9 Refactor @memoized decorator with expiration feature; cache installed boards per platform 2018-03-23 00:08:07 +02:00
Ivan Kravets
0f4ab5b50b Update Unity tool to 2.4.3 2018-03-22 18:18:03 +02:00
Ivan Kravets
ca34da51aa PIO Home: sort folders by name (ignore case), catch ServerError exceptions // Resolve #1454 Resolve #1302 2018-03-22 17:37:51 +02:00
Ivan Kravets
f937eabc1a Handle error connections to the latest news for PIO Home // Resolve #1470 Resolve #1474 Resolve #1478 Resolve #1480 2018-03-22 16:14:11 +02:00
Ivan Kravets
e019341e59 Bump version to 3.5.3a5 2018-03-22 12:11:46 +02:00
Ivan Kravets
50b2bc07dc Add PlatformIO IDE as recommended extension for VSCode's workspace 2018-03-22 12:10:16 +02:00
Ivan Kravets
8f7206b186 Use absolute path for CPP includes when exporting data to IDE 2018-03-22 12:09:36 +02:00
Ivan Kravets
1461953341 Process "unflags" after frameworks 2018-03-21 19:49:45 +02:00
Ivan Kravets
cd3245960b ThingForward, Webinar: Unit Testing for Embedded with PlatformIO and Qt Creator 2018-03-21 19:49:06 +02:00
Ivan Kravets
580c0601cf Add example with POST scripting and changing of build flags in runtime 2018-03-20 19:32:00 +02:00
Ivan Kravets
979a6a80f0 Fix issue which did not allow overriding the runtime build environment using an extra POST script 2018-03-20 19:24:05 +02:00
Ivan Kravets
6f9fac5663 YAPF for test 2018-03-20 16:10:11 +02:00
Ivan Kravets
85730619f4 Ignore ".pytest_cache" from sources 2018-03-20 16:08:22 +02:00
Ivan Kravets
61374f15f1 Fix issue with `build_unflags` option when a macro contains a value 2018-03-20 16:06:39 +02:00
Ivan Kravets
ad52f618cf Save data in UTF-8 by default 2018-03-20 01:14:29 +02:00
Ivan Kravets
bbb32607ed Catch UnicodeError when saving content cache 2018-03-20 01:06:05 +02:00
Ivan Kravets
669ef3cc93 Bump version to 3.5.3a4 2018-03-20 00:42:37 +02:00
Ivan Kravets
d47022b8c3 PIO Home: Recent news 2018-03-20 00:42:10 +02:00
Ivan Kravets
c20cd1b464 Do not automatically load JSON from cached resources 2018-03-19 17:16:51 +02:00
Ivan Kravets
3161e5f606 Bump version to 3.5.3a3 2018-03-16 14:13:00 +02:00
Ivan Kravets
233d48fac0 Describe a project with "description" option for "platformio.ini" 2018-03-16 14:12:05 +02:00
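A short example of the new option (the wording is a placeholder):

    [platformio]
    ; free-form summary of the project
    description = An example PlatformIO project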
Ivan Kravets
218a1dccf6 Fix issue with installing only the one platform package by specified type 2018-03-16 13:33:59 +02:00
Ivan Kravets
02bad10652 Do not show duplicated upload protocols 2018-03-15 20:53:23 +02:00
Ivan Kravets
7495160374 Bump version to 3.5.3a2 2018-03-15 19:53:47 +02:00
Ivan Kravets
3663dc3470 Fix issue with useless project rebuilding for case insensitive file systems (Windows) 2018-03-15 19:53:05 +02:00
Ivan Kravets
d2b34d42f7 Docs: Fix missing "s" for "99-platformio-udev.rules" 2018-03-14 11:36:30 +02:00
Ivan Kravets
b78a151706 Update requirements for PIO Home and PIO Core+ contribs 2018-03-13 22:59:02 +02:00
Ivan Kravets
6a49df7dfe Start a work on PIO Core 3.5.3 2018-03-13 12:02:25 +02:00
Ivan Kravets
f79e2e38ef Merge branch 'release/v3.5.2' 2018-03-13 12:00:28 +02:00
Ivan Kravets
bc323252e8 Bump version to 3.5.2 (issue #1301 issue #1313 issue #1323 issue #1303 issue #1029 issue #1310 issue #1390 issue #1312 issue #1433) 2018-03-13 12:00:01 +02:00
Ivan Kravets
f63a6d73ee Add "include_dir" to known options list // Resolve #1433 2018-03-12 19:22:55 +02:00
Jack
5e6d1d9361 Fix CLion not recognizing includes in lib and .piolibdeps (#1429) 2018-03-12 16:58:08 +02:00
Ivan Kravets
7e875553c2 Allow to control maximum depth of nested includes for conditional PreProcessor 2018-03-08 16:29:09 +02:00
Ivan Kravets
105cd0fa71 Use custom object suffix only for Arduino/Energia frameworks 2018-03-07 23:35:03 +02:00
Ivan Kravets
8676f471f1 Docs: Update url for Sanguino boards 2018-03-07 22:48:21 +02:00
Ivan Kravets
93d524a392 Bump version to 3.5.2rc4 2018-03-07 20:47:09 +02:00
Ivan Kravets
e5b73dcd2b Fix issue when custom board is used with the same ID 2018-03-07 20:46:31 +02:00
Ivan Kravets
ade6c25056 Docs: Arduino Core ESP32 Wiki 2018-03-07 17:01:53 +02:00
Ivan Kravets
e0ce40d6b3 Ability to specify a name for new project 2018-03-07 16:30:21 +02:00
Ivan Kravets
90993ec69f Docs: Show onboard debug tools before 2018-03-07 13:48:59 +02:00
Ivan Kravets
3269d243a8 Firmware memory size explanation: text, data and bss 2018-03-06 12:06:52 +02:00
Ivan Kravets
ef861ed702 Use workspace folder for VSCode CPP tool DB files 2018-03-06 10:38:41 +02:00
Ivan Kravets
b1c9eb9022 Bump version to 3.5.2rc3 2018-03-05 15:51:23 +02:00
Ivan Kravets
3d300414ac Multiple themes (Dark & Light) for PlatformIO Home 2018-03-05 15:50:49 +02:00
Ivan Kravets
3a16ecbaa1 Typo fix 2018-03-04 11:29:03 +02:00
Ivan Kravets
9415b369e1 Bump version to 3.5.2rc2 2018-03-03 14:37:08 +02:00
Ivan Kravets
6ec1890f52 Fix GitHub's "TLSV1_ALERT_PROTOCOL_VERSION" issue when upgrading PIO Core to development version 2018-03-03 14:36:17 +02:00
Ivan Kravets
d1c7f56950 Add example with a custom name for library dependency 2018-03-02 14:40:35 +02:00
Ivan Kravets
2ccb30b0f0 Print VCS version if available in LDF Graph 2018-03-02 14:31:24 +02:00
Ivan Kravets
e95354afeb Bump version to 3.5.2rc1 2018-02-24 01:51:07 +02:00
Ivan Kravets
cfb9ec77ce Keep VSCode CPP DB in workspace 2018-02-24 01:40:49 +02:00
Ivan Kravets
3a52f35fe5 Add "udev" to FAQ 2018-02-20 15:06:35 +02:00
Ivan Kravets
ba0e87b978 Fix issue with mDNS lookup service // Resolve #1310 2018-02-20 14:38:47 +02:00
Ivan Kravets
1cb1af3375 Revert back caching for Travis.CI 2018-02-20 14:29:20 +02:00
Ivan Kravets
7c0cd12f80 Builder: append target suffix to the filename instead of replacing 2018-02-20 01:15:52 +02:00
Ivan Kravets
a3457dfca6 Bump version to 3.5.2b5 2018-02-20 01:11:25 +02:00
Ivan Kravets
cdee242333 Builder: append target suffix to the filename instead of replacing 2018-02-20 01:10:27 +02:00
Ivan Kravets
aa0b6c2071 Sync boards from ST STM32 2018-02-19 20:37:57 +02:00
Ivan Kravets
be306224e3 Document creating of a "Custom build target" 2018-02-17 17:13:30 +02:00
Ivan Kravets
1fce214a6b Bump version to 3.5.2b4 2018-02-16 14:53:00 +02:00
Ivan Kravets
f1f42c6888 Fix issue with PIO Unified Debugger and "debug_load_mode = always" 2018-02-16 14:52:36 +02:00
Ivan Kravets
1d6dcb1c5a Bump version to 3.5.2b3 2018-02-15 19:44:51 +02:00
Ivan Kravets
6b36a29858 Control PIO Unified Debugger and its firmware loading mode using "debug_load_mode" option 2018-02-15 19:44:29 +02:00
Ivan Kravets
18c6fe98ee Search for a library using PIO Library Registry ID 2018-02-15 02:00:12 +02:00
Ivan Kravets
f86885a523 Typo fix 2018-02-13 23:21:19 +02:00
Ivan Kravets
0c2f973412 Mark project source and library directories for CLion IDE // Resolve #1359 Resolve #897 Resolve #1345 2018-02-13 19:24:02 +02:00
Ivan Kravets
591e876660 Move http://platformio.org to https://platformio.org 2018-02-13 01:34:24 +02:00
Ivan Kravets
acefc8d276 Remove debug code 2018-02-13 00:19:07 +02:00
Ivan Kravets
0763a54af3 Add Atmel-ICE debugging tool 2018-02-12 19:32:02 +02:00
Ivan Kravets
d7f7418812 Sync docs with hardware changes 2018-02-12 18:31:31 +02:00
Ivan Kravets
06cce20707 Minor formatting to the system info 2018-02-12 17:27:20 +02:00
Ivan Kravets
b553b8f9df Sort debug tools by name 2018-02-10 18:09:23 +02:00
Ivan Kravets
8736e7bfb0 Bump version to 3.5.2b2 2018-02-10 17:00:45 +02:00
Ivan Kravets
231bd8b294 Implement autodetecting of default debug tool 2018-02-09 21:47:59 +02:00
Ivan Kravets
cc08bb0fd0 Document how to switch between stable release and upstream dev/platform 2018-02-09 14:31:22 +02:00
Ivan Kravets
46cca359e7 Bump version to 3.5.2b1 2018-02-09 01:30:35 +02:00
Ivan Kravets
aac0b29929 Dump only "platform, board and framework" by default when processing environment 2018-02-09 01:23:02 +02:00
Ivan Kravets
f7023aa8ff Print platform information while processing environment 2018-02-07 19:44:02 +02:00
Ivan Kravets
904c5464c3 Add user libraries before built-in (frameworks, toolchains) 2018-02-07 15:28:34 +02:00
Ivan Kravets
31edb2a570 Minor fix 2018-02-06 11:27:44 +02:00
Ivan Kravets
d428d18fae Bump version to 3.5.2a8 2018-02-06 11:26:26 +02:00
Ivan Kravets
e7e80ff152 Fix issue when no environment is specified 2018-02-06 11:25:38 +02:00
Ivan Kravets
1362630ed6 Raise an error when invalid environment name was set for env_default option 2018-02-06 01:54:34 +02:00
Ivan Kravets
e5543b2aee Fix broken RST 2018-02-06 00:36:52 +02:00
Ivan Kravets
53afdc5e02 Fix project generator for Qt Creator IDE // Resolve #1303 Resolve #1323 2018-02-05 21:44:37 +02:00
Ivan Kravets
20641bb4ff Sync docs 2018-02-05 17:30:44 +02:00
Ivan Kravets
33a05fa7ca Update README 2018-02-05 13:16:12 +02:00
Ivan Kravets
f358a4ff57 Ignore unit-testing/calculator from CI test 2018-02-05 12:25:24 +02:00
Ivan Kravets
aa57924488 Fix command:ci test 2018-02-04 01:46:57 +02:00
Ivan Kravets
c5af85f123 Fix issue with multiple OneWire libs and infinite pause 2018-02-04 01:04:53 +02:00
Ivan Kravets
55b8ff7e74 Fold install dev/platform output for CI 2018-02-04 00:08:28 +02:00
Ivan Kravets
8913f1b1ea Don't install desktop dev/platforms 2018-02-03 23:04:46 +02:00
Ivan Kravets
4360ff7463 Use examples from dev/platforms for test 2018-02-03 22:59:41 +02:00
Ivan Kravets
718d1f2de1 Use examples from development platforms 2018-02-03 21:08:45 +02:00
Ivan Kravets
195444b253 Refactor dev/platforms and frameworks docs 2018-02-03 01:34:01 +02:00
Ivan Kravets
17dc5f594f VSCode: Show debug console for each session 2018-02-02 17:51:54 +02:00
Ivan Kravets
3b99dabbf4 Add example with executing of a custom script before upload action 2018-02-01 22:20:26 +02:00
Ivan Kravets
c9e578f977 Do not show debug console by default for VSCode 2018-01-31 01:11:25 +02:00
Ivan Kravets
00782fc624 Cosmetic changes 2018-01-30 23:31:21 +02:00
Ivan Kravets
19d2dfdad0 Bump version to 3.5.2a7 2018-01-30 21:34:10 +02:00
Ivan Kravets
1890162f3f VSCode: add a new "Pre-Debug" task and run it before debugging session 2018-01-30 21:33:56 +02:00
Ivan Kravets
4980d3e4bb Bump version to 3.5.2a6 2018-01-30 15:25:17 +02:00
Ivan Kravets
fc53cb4489 Revert back to SCons 2.0, a lot of issues with non-unicode locales // Issue #895 2018-01-30 15:14:55 +02:00
Ivan Kravets
640aa72cff Minor fix 2018-01-30 14:49:10 +02:00
Ivan Kravets
6235328194 Bump version to 3.5.2a5 2018-01-28 00:28:21 +02:00
Ivan Kravets
332472e84b Save temporary file in unicode for INO2CPP 2018-01-28 00:17:55 +02:00
Ivan Kravets
59fb4b103f Fix issue when project without a specified board can't be uploaded // Resolve #1313 2018-01-27 01:16:37 +02:00
Ivan Kravets
8186aed8d9 Bump version to 3.5.2a4 2018-01-27 00:52:45 +02:00
Ivan Kravets
31700c6bfc Fix issue with detecting media disk when mbed upload protocol is specified 2018-01-27 00:18:32 +02:00
Ivan Kravets
316c2c6e1a Improve calculating of project hash 2018-01-26 22:24:49 +02:00
Ivan Kravets
b6ad672f6a Use SCons "gas" tool instead of "as" 2018-01-26 20:50:33 +02:00
Ivan Kravets
59337c71c1 Upgrade build system to SCons 3.0 // Issue #895 2018-01-26 20:04:43 +02:00
Ivan Kravets
7a40992cc1 Add aliases for LDF compatibility modes 2018-01-26 19:53:07 +02:00
Ivan Kravets
1412f085b8 Minor improvements 2018-01-25 18:12:36 +02:00
Ivan Kravets
6b826abce0 Bump version to 3.5.2a3 2018-01-25 18:03:10 +02:00
Ivan Kravets
f8dafbca80 Show device system information (MCU, Frequency, RAM, Flash, Debugging tools) in a build log 2018-01-25 17:58:52 +02:00
Ivan Kravets
dabe9ba2a7 Show all available upload protocols before firmware uploading 2018-01-25 14:26:15 +02:00
Ivan Kravets
b8fde283fd Use util.get_systype when checking for system 2018-01-24 18:33:41 +02:00
Ivan Kravets
fa738650da Add special prefix for BlackMagic probe for Windows COM ports >= COM10 2018-01-24 17:22:28 +02:00
Ivan Kravets
717a699546 Bump version to 3.5.2a2 2018-01-24 16:10:14 +02:00
Ivan Kravets
f512ccbe68 Fix issue with duplicated "include" records when generating data for IDE // Resolve #1301 2018-01-24 15:53:28 +02:00
Ivan Kravets
de523493b2 Improve support for old mbed libraries without manifest 2018-01-24 14:56:44 +02:00
Ivan Kravets
c0b277d9c8 Handle "os.mbed.com" URL as Mercurial (hg) repository 2018-01-24 14:56:15 +02:00
Ivan Kravets
e615e7529e Fix issue with downloader when dependency URL ends with "/" 2018-01-24 14:34:08 +02:00
Ivan Kravets
86667c5664 Bump version to 3.5.2a1 2018-01-18 22:13:36 +02:00
Ivan Kravets
dcb299e9b9 Use dynamic "build_dir" when checking project for structure changes 2018-01-18 22:12:32 +02:00
Ivan Kravets
2b4b2eb571 Pass a list iterator directly to "any" or "all" functions 2018-01-18 22:04:43 +02:00
Ivan Kravets
3caa2a9e8d Merge branch 'release/v3.5.1' into develop 2018-01-18 15:12:25 +02:00
Ivan Kravets
0b5769dc57 Merge branch 'release/v3.5.1' 2018-01-18 15:12:24 +02:00
Ivan Kravets
9b9b05439b Bump version to 3.5.1 (issue #1273 issue #1280 issue #1286 issue #1247 issue #1284 issue #1299 issue #1290) 2018-01-18 15:11:38 +02:00
Ivan Kravets
93d4e68378 Bump docs to 3.5.1 2018-01-18 15:04:15 +02:00
Ivan Kravets
2c79de971e Show full library version in "Library Dependency Graph" including VCS information // Issue #1274 2018-01-18 14:49:01 +02:00
Ivan Kravets
bc18941eb0 Fix project generator for Qt Creator IDE // Resolve #1299 Resolve #1290 2018-01-18 01:30:39 +02:00
Ivan Kravets
23ecce297a Update docs for custom flash size for ESP8266 2018-01-17 13:18:14 +02:00
Ivan Kravets
cc646b19bf Extend example with a custom program name using extra scripting 2018-01-17 01:00:09 +02:00
Ivan Kravets
4b08dbd602 Bump version to 3.5.1a7 2018-01-16 13:40:59 +02:00
Ivan Kravets
d822334fdd Drop "python-dateutil" dependency, implement light-weight "parse_date" 2018-01-16 00:57:06 +02:00
Ivan Kravets
3289b36450 Refactor code without "arrow" dependency (resolve issue with "ImportError: No module named backports.functools_lru_cache") 2018-01-16 00:06:24 +02:00
Ivan Kravets
affd53eb27 Use "python_requires" for setuptools and depend on Python 2.7+ <3 2018-01-15 23:02:40 +02:00
Ivan Kravets
06a6822173 Minor fix to history 2018-01-13 19:45:54 +02:00
Ivan Kravets
6380d6c3ea Bump version to 3.5.1a6 2018-01-13 19:44:55 +02:00
Ivan Kravets
24f314d73d Improve a work in off-line mode 2018-01-13 19:44:05 +02:00
Ivan Kravets
6cddaf9eb7 Ignore VSCode launch.json for VCS 2018-01-13 19:01:27 +02:00
Ivan Kravets
ec419f3d0e Refactor CMD:LIB tests 2018-01-13 17:02:08 +02:00
Ivan Kravets
a6c84da83a Check cached API result before Internet 2018-01-13 01:21:53 +02:00
Ivan Kravets
7cad113f0a Cleanup tests 2018-01-13 01:19:41 +02:00
Ivan Kravets
712155243c Add "lib" and ".piolibdeps" to CLion project index 2018-01-12 14:20:53 +02:00
Ivan Kravets
2091a33fb9 Show full version of the current interpreter 2018-01-12 02:23:55 +02:00
Ivan Kravets
1d5245edbd Add "lib_ldf_mode = chain+" for example with C/C++ Preprocessor conditional syntax 2018-01-11 14:14:35 +02:00
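For reference, the option this docs example uses (the environment name is a placeholder):

    [env:uno]
    ; "+" modes additionally evaluate C/C++ preprocessor conditionals in #include chains
    lib_ldf_mode = chain+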
Ivan Kravets
cfb22f2a36 Add FAQ:Package Manager for PackageInstallError 2018-01-11 02:10:51 +02:00
Ivan Kravets
16eb41b84e Document "[Error 5] Access is denied" for Package Manager 2018-01-11 02:02:59 +02:00
Ivan Kravets
ae38d17b7f Ignore packages with TMP_FOLDER_PREFIX 2018-01-10 15:23:56 +02:00
Ivan Kravets
7bbb850c2f Bump version to 3.5.1a5 2018-01-10 03:33:51 +02:00
Ivan Kravets
fda439841e Restore PY2/3 ConfigParser 2018-01-10 03:07:17 +02:00
Ivan Kravets
c558584640 Fix importing of ConfigParser 2018-01-10 02:58:50 +02:00
Ivan Kravets
cfb04b31a4 Bump version to 3.5.1a4 2018-01-10 02:12:26 +02:00
Ivan Kravets
1090c414f5 Update PIO Core+ and PIO Home // Resolve #1247 Resolve #1280 Resolve #1284 Resolve #1286 2018-01-10 02:06:05 +02:00
Ivan Kravets
5b64bf1f7c Set default build environment for each example 2018-01-09 22:26:33 +02:00
Ivan Kravets
61eb989edd Fix project generator for CLion // Issue #1287 2018-01-09 22:15:03 +02:00
Dmitry Bolotin
23ae8e0d3e Additional fix to 7354515 (#1287)
* Additional fix to 7354515

* Update CMakeListsPrivate.txt.tpl

Also perform `replace('"', '\\"')`
2018-01-09 22:07:57 +02:00
Ivan Kravets
e4f8a1877c Fix library updates when a version is declared in VCS format (not SemVer) 2018-01-09 21:56:21 +02:00
Ivan Kravets
61872dd734 Improve support of PIO Unified Debugger for Eclipse Oxygen 2018-01-06 01:04:43 +02:00
Ivan Kravets
16b307d1b3 Sync docs 2018-01-05 19:08:16 +02:00
Ivan Kravets
a4770a27f4 Bump version to 3.5.1a3 2018-01-05 00:18:08 +02:00
Ivan Kravets
ba858989f2 Fix PIO Unified Debugger for mbed framework 2018-01-05 00:17:45 +02:00
Ivan Kravets
93c055a2ec Bump version to 3.5.1a2 2018-01-04 17:14:04 +02:00
Ivan Kravets
2b3bc05f2b Add an option to specify a custom test_speed // Resolve #1273 2018-01-04 16:14:56 +02:00
Ivan Kravets
5260217537 Sync docs with new boards 2018-01-03 19:46:44 +02:00
Ivan Kravets
62235ef32d Sync examples 2018-01-03 18:59:41 +02:00
Ivan Kravets
ec40dcada7 Custom firmware/program name in build directory 2018-01-03 18:35:25 +02:00
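The commit above covers overriding the output file name via extra scripting; a minimal sketch of such a script, assuming the standard PlatformIO SCons hook (the naming scheme itself is hypothetical):

# extra_script.py, referenced from platformio.ini via the "extra_scripts" option
Import("env")  # SCons construction environment injected by PlatformIO

# Hypothetical scheme: embed the board name into the firmware file name.
env.Replace(PROGNAME="firmware_%s" % env.get("BOARD", "generic"))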
Ivan Kravets
5e666492c3 Rename envs_dir option to build_dir in platformio.ini 2018-01-03 15:47:02 +02:00
Ivan Kravets
82246a837e Change wording 2017-12-31 00:07:12 +02:00
Ivan Kravets
2758e99295 Bump version to 3.5.1a1 2017-12-29 20:51:20 +02:00
Ivan Kravets
7354515845 Fix project generator for CLion IDE 2017-12-29 20:50:17 +02:00
Ivan Kravets
d58c392930 Typo fix in docs 2017-12-28 20:38:08 +02:00
Ivan Kravets
86cb2efd64 Sync docs 2017-12-28 20:36:57 +02:00
Ivan Kravets
b307855207 Fix typo in docs 2017-12-28 20:35:24 +02:00
Ivan Kravets
3ad4ff02e8 Typo fix 2017-12-28 18:57:17 +02:00
Ivan Kravets
0082dc43a3 Merge branch 'release/v3.5.0' into develop 2017-12-28 17:26:12 +02:00
90 changed files with 4271 additions and 2213 deletions

View File

@@ -1,12 +1,18 @@
build: off
environment:
platform:
- x86
- x64
environment:
matrix:
- TOXENV: "py27"
install:
- cmd: git submodule update --init --recursive
- cmd: SET PATH=%PATH%;C:\Python27\Scripts;C:\MinGW\bin
- cmd: SET PATH=C:\MinGW\bin;%PATH%
- if %PLATFORM% == x64 SET PATH=C:\Python27-x64;C:\Python27-x64\Scripts;%PATH%
- if %PLATFORM% == x86 SET PATH=C:\Python27;C:\Python27\Scripts;%PATH%
- cmd: pip install tox
test_script:

View File

@@ -1,22 +1,28 @@
What kind of issue is this?
- [ ] Question. This issue tracker is not the place for questions. If you want to ask how to do
something, or to understand why something isn't working the way you expect it to, use
our Community Forums https://community.platformio.org
- [ ] **Question**.
This issue tracker is not the place for questions. If you want to ask how to do something,
or to understand why something isn't working the way you expect it to,
use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)
- [ ] PlatformIO IDE. All issues related to PlatformIO IDE should be reported to appropriate repository
https://github.com/platformio/platformio-atom-ide/issues
- [ ] **PlatformIO IDE**.
All issues related to PlatformIO IDE should be reported to appropriate repository:
[PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)
- [ ] Development Platform or Board. All issues related to Development Platforms or Embedded Boards
should be reported to appropriate repository.
See full list with repositories and search for "platform-xxx" repository related to your hardware
https://github.com/platformio?query=platform-
- [ ] **Development Platform or Board**.
All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
should be reported to appropriate repository related to your hardware
https://github.com/topics/platformio-platform
- [ ] Feature Request. Start by telling us what problem you're trying to solve. Often a solution
- [ ] **Feature Request**.
Start by telling us what problem you're trying to solve. Often a solution
already exists! Don't send pull requests to implement new features without first getting our
support. Sometimes we leave features out on purpose to keep the project small.
- [ ] PlatformIO Core. If you've found a bug, please provide an information below.
- [ ] **PlatformIO Core**.
If you've found a bug, please provide an information below.
*You can erase any parts of this template not applicable to your Issue.*

1  .gitignore vendored
View File

@@ -9,3 +9,4 @@ build
coverage.xml
.coverage
htmlcov
.pytest_cache

View File

@@ -1,3 +1,3 @@
[settings]
line_length=79
known_third_party=arrow,bottle,click,lockfile,pytest,requests,SCons,semantic_version,serial
known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial

View File

@@ -20,4 +20,4 @@ confidence=
# --disable=W"
# disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating
disable=locally-disabled,missing-docstring,invalid-name,too-few-public-methods,redefined-variable-type,import-error,similarities,unsupported-membership-test,unsubscriptable-object,ungrouped-imports,cyclic-import
disable=locally-disabled,missing-docstring,invalid-name,too-few-public-methods,redefined-variable-type,import-error,similarities,unsupported-membership-test,unsubscriptable-object,ungrouped-imports,cyclic-import,superfluous-parens

View File

@@ -21,7 +21,7 @@ matrix:
install:
- git submodule update --init --recursive
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install tox; else pip install -U tox; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install "tox==3.0.0"; else pip install -U tox; fi
# ChipKIT issue: install 32-bit support for GCC PIC32
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
@@ -29,10 +29,6 @@ install:
script:
- tox -e $TOX_ENV
after_success:
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then tox -e coverage; fi
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then bash <(curl -s https://codecov.io/bash); fi
notifications:
email: false

File diff suppressed because it is too large

View File

@@ -10,7 +10,7 @@ yapf:
yapf --recursive --in-place platformio/
test:
py.test -v -s tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py
py.test -v -s -n 3 --dist=loadscope tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py
before-commit: isort yapf lint test

View File

@@ -7,9 +7,6 @@ PlatformIO
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
:alt: AppVeyor.CI Build Status
.. image:: https://requires.io/github/platformio/platformio-core/requirements.svg?branch=develop
:target: https://requires.io/github/platformio/platformio-core/requirements/?branch=develop
:alt: Requirements Status
.. image:: https://img.shields.io/pypi/v/platformio.svg
:target: https://pypi.python.org/pypi/platformio/
:alt: Latest Version
@@ -18,101 +15,110 @@ PlatformIO
:alt: License
.. image:: https://img.shields.io/PlatformIO/Community.png
:alt: Community Forums
:target: https://community.platformio.org
.. image:: https://img.shields.io/PlatformIO/Plus.png?color=orange
:alt: PlatformIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
:target: https://pioplus.com
:target: https://community.platformio.org?utm_source=github&utm_medium=core
.. image:: https://img.shields.io/PIO/Plus.png?color=orange
:alt: PIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
:target: https://platformio.org/pricing?utm_source=github&utm_medium=core
**Quick Links:** `Home Page <http://platformio.org>`_ |
`PlatformIO Plus <https://pioplus.com>`_ |
`PlatformIO IDE <http://platformio.org/platformio-ide>`_ |
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
`PIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
`Project Examples <https://github.com/platformio/platformio-examples/>`_ |
`Docs <http://docs.platformio.org>`_ |
`Donate <http://platformio.org/donate>`_ |
`Contact Us <https://pioplus.com/contact.html>`_
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
`Facebook <https://www.facebook.com/platformio>`_ |
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
`Bintray <https://bintray.com/platformio>`_ |
`Community <https://community.platformio.org>`_
`Community <https://community.platformio.org?utm_source=github&utm_medium=core>`_
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
:target: http://platformio.org
:target: https://platformio.org?utm_source=github&utm_medium=core
`PlatformIO <http://platformio.org>`_ is an open source ecosystem for IoT
`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ is an open source ecosystem for IoT
development. Cross-platform IDE and unified debugger. Remote unit testing and
firmware updates.
Get Started
-----------
* `What is PlatformIO? <http://docs.platformio.org/page/what-is-platformio.html>`_
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
Products
Open Source
-----------
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`_
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
PIO Plus
--------
* `PlatformIO IDE <http://platformio.org/platformio-ide>`_
* `PlatformIO Core <http://docs.platformio.org/page/core.html>`_
* `PIO Remote™ <http://docs.platformio.org/page/plus/pio-remote.html>`_
* `PIO Unified Debugger <http://docs.platformio.org/page/plus/debugging.html>`_
* `PIO Unit Testing <http://docs.platformio.org/page/plus/unit-testing.html>`_
* `PIO Delivery™ <http://platformio.org/pricing#solution-pio-delivery>`_
* `Cloud Builder <http://platformio.org/pricing#solution-cloud-builder>`_
* `PIO Remote <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
* `PIO Unified Debugger <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `PIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `Cloud IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#solution-pio-delivery>`_
* `Integration Services <https://platformio.org/pricing?utm_source=github&utm_medium=core#enterprise-features>`_
Registry
--------
* `Libraries <http://platformio.org/lib>`_
* `Development Platforms <http://platformio.org/platforms>`_
* `Frameworks <http://platformio.org/frameworks>`_
* `Embedded Boards <http://platformio.org/boards>`_
Solutions
---------
* `Library Manager <http://docs.platformio.org/page/librarymanager/index.html>`_
* `Cloud IDEs Integration <http://platformio.org/pricing#solution-cloud-ide>`_
* `Standalone IDEs Integration <http://docs.platformio.org/page/ide.html#other-ide>`_
* `Continuous Integration <http://docs.platformio.org/page/ci/index.html>`_
* `Libraries <https://platformio.org/lib?utm_source=github&utm_medium=core>`_
* `Development Platforms <https://platformio.org/platforms?utm_source=github&utm_medium=core>`_
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_
Development Platforms
---------------------
* `Atmel AVR <http://platformio.org/platforms/atmelavr>`_
* `Atmel SAM <http://platformio.org/platforms/atmelsam>`_
* `Espressif 32 <http://platformio.org/platforms/espressif32>`_
* `Espressif 8266 <http://platformio.org/platforms/espressif8266>`_
* `Freescale Kinetis <http://platformio.org/platforms/freescalekinetis>`_
* `Intel ARC32 <http://platformio.org/platforms/intel_arc32>`_
* `Lattice iCE40 <http://platformio.org/platforms/lattice_ice40>`_
* `Maxim 32 <http://platformio.org/platforms/maxim32>`_
* `Microchip PIC32 <http://platformio.org/platforms/microchippic32>`_
* `Nordic nRF51 <http://platformio.org/platforms/nordicnrf51>`_
* `Nordic nRF52 <http://platformio.org/platforms/nordicnrf52>`_
* `NXP LPC <http://platformio.org/platforms/nxplpc>`_
* `Silicon Labs EFM32 <http://platformio.org/platforms/siliconlabsefm32>`_
* `ST STM32 <http://platformio.org/platforms/ststm32>`_
* `Teensy <http://platformio.org/platforms/teensy>`_
* `TI MSP430 <http://platformio.org/platforms/timsp430>`_
* `TI Tiva <http://platformio.org/platforms/titiva>`_
* `WIZNet W7500 <http://platformio.org/platforms/wiznet7500>`_
* `Atmel AVR <https://platformio.org/platforms/atmelavr?utm_source=github&utm_medium=core>`_
* `Atmel SAM <https://platformio.org/platforms/atmelsam?utm_source=github&utm_medium=core>`_
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
* `Intel MCS-51 (8051) <https://platformio.org/platforms/intel_mcs51?utm_source=github&utm_medium=core>`_
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
* `Samsung ARTIK <https://platformio.org/platforms/samsung_artik?utm_source=github&utm_medium=core>`_
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
* `TI MSP430 <https://platformio.org/platforms/timsp430?utm_source=github&utm_medium=core>`_
* `TI Tiva <https://platformio.org/platforms/titiva?utm_source=github&utm_medium=core>`_
* `WIZNet W7500 <https://platformio.org/platforms/wiznet7500?utm_source=github&utm_medium=core>`_
Frameworks
----------
* `Arduino <http://platformio.org/frameworks/arduino>`_
* `ARTIK SDK <http://platformio.org/frameworks/artik-sdk>`_
* `CMSIS <http://platformio.org/frameworks/cmsis>`_
* `Energia <http://platformio.org/frameworks/energia>`_
* `ESP-IDF <http://platformio.org/frameworks/espidf>`_
* `libOpenCM3 <http://platformio.org/frameworks/libopencm3>`_
* `mbed <http://platformio.org/frameworks/mbed>`_
* `Pumbaa <http://platformio.org/frameworks/pumbaa>`_
* `Simba <http://platformio.org/frameworks/simba>`_
* `SPL <http://platformio.org/frameworks/spl>`_
* `STM32Cube <http://platformio.org/frameworks/stm32cube>`_
* `WiringPi <http://platformio.org/frameworks/wiringpi>`_
* `Arduino <https://platformio.org/frameworks/arduino?utm_source=github&utm_medium=core>`_
* `ARTIK SDK <https://platformio.org/frameworks/artik-sdk?utm_source=github&utm_medium=core>`_
* `CMSIS <https://platformio.org/frameworks/cmsis?utm_source=github&utm_medium=core>`_
* `Energia <https://platformio.org/frameworks/energia?utm_source=github&utm_medium=core>`_
* `ESP-IDF <https://platformio.org/frameworks/espidf?utm_source=github&utm_medium=core>`_
* `ESP8266 Non-OS SDK <https://platformio.org/frameworks/esp8266-nonos-sdk?utm_source=github&utm_medium=core>`_
* `ESP8266 RTOS SDK <https://platformio.org/frameworks/esp8266-rtos-sdk?utm_source=github&utm_medium=core>`_
* `libOpenCM3 <https://platformio.org/frameworks/libopencm3?utm_source=github&utm_medium=core>`_
* `mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
* `Pumbaa <https://platformio.org/frameworks/pumbaa?utm_source=github&utm_medium=core>`_
* `Simba <https://platformio.org/frameworks/simba?utm_source=github&utm_medium=core>`_
* `SPL <https://platformio.org/frameworks/spl?utm_source=github&utm_medium=core>`_
* `STM32Cube <https://platformio.org/frameworks/stm32cube?utm_source=github&utm_medium=core>`_
* `Tizen RT <https://platformio.org/frameworks/tizenrt?utm_source=github&utm_medium=core>`_
* `WiringPi <https://platformio.org/frameworks/wiringpi?utm_source=github&utm_medium=core>`_
Contributing
------------

2  docs

Submodule docs updated: c76ccaf337...768ccfd2b4

View File

@@ -14,18 +14,20 @@
import sys
VERSION = (3, 5, 0)
VERSION = (3, 6, 5)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = ("An open source ecosystem for IoT development. "
"Cross-platform build system and library manager. "
"Continuous and IDE integration. "
"Arduino, ESP8266 and ARM mbed compatible")
__url__ = "http://platformio.org"
__description__ = (
"An open source ecosystem for IoT development. "
"Cross-platform IDE and unified debugger. "
"Remote unit testing and firmware updates. "
"Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
__url__ = "https://platformio.org"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
@@ -36,5 +38,5 @@ if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7, please upgrade Python.\n"
"Python 3 is not yet supported.\n")
sys.stderr.write(msg % (__version__, sys.version.split()[0]))
sys.stderr.write(msg % (__version__, sys.version))
sys.exit(1)
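For reference, the tuple-to-string join kept in the hunk above evaluates as follows:

VERSION = (3, 6, 5)
__version__ = ".".join([str(s) for s in VERSION])
print(__version__)  # -> 3.6.5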

View File

@@ -107,8 +107,8 @@ def configure():
try:
click_echo_origin[origin](*args, **kwargs)
except IOError:
(sys.stderr.write if kwargs.get("err") else
sys.stdout.write)("%s\n" % (args[0] if args else ""))
(sys.stderr.write if kwargs.get("err") else sys.stdout.write)(
"%s\n" % (args[0] if args else ""))
click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)
@@ -135,7 +135,7 @@ An unexpected error occurred. Further steps:
`pip install -U platformio` command
* Try to find answer in FAQ Troubleshooting section
http://docs.platformio.org/page/faq.html
https://docs.platformio.org/page/faq.html
* Report this problem to the developers
https://github.com/platformio/platformio-core/issues

View File

@@ -12,19 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import codecs
import hashlib
import json
import os
import uuid
from copy import deepcopy
from os import environ, getenv, listdir, remove
from os.path import abspath, dirname, expanduser, getmtime, isdir, isfile, join
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from time import time
import requests
from lockfile import LockFailed, LockFile
from platformio import __version__, exception, util
from platformio import exception, lockfile, util
def projects_dir_validate(projects_dir):
@@ -63,7 +63,7 @@ DEFAULT_SETTINGS = {
},
"enable_telemetry": {
"description":
("Telemetry service <http://docs.platformio.org/page/"
("Telemetry service <https://docs.platformio.org/page/"
"userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
"value":
True
@@ -106,11 +106,8 @@ class State(object):
def __exit__(self, type_, value, traceback):
if self._prev_state != self._state:
try:
with open(self.path, "w") as fp:
if "dev" in __version__:
json.dump(self._state, fp, indent=4)
else:
json.dump(self._state, fp)
with codecs.open(self.path, "w", encoding="utf8") as fp:
json.dump(self._state, fp)
except IOError:
raise exception.HomeDirPermissionsError(util.get_home_dir())
self._unlock_state_file()
@@ -118,21 +115,19 @@ class State(object):
def _lock_state_file(self):
if not self.lock:
return
self._lockfile = LockFile(self.path)
if self._lockfile.is_locked() and \
(time() - getmtime(self._lockfile.lock_file)) > 10:
self._lockfile.break_lock()
self._lockfile = lockfile.LockFile(self.path)
try:
self._lockfile.acquire()
except LockFailed:
except IOError:
raise exception.HomeDirPermissionsError(dirname(self.path))
def _unlock_state_file(self):
if self._lockfile:
if hasattr(self, "_lockfile") and self._lockfile:
self._lockfile.release()
def __del__(self):
self._unlock_state_file()
class ContentCache(object):
@@ -141,7 +136,7 @@ class ContentCache(object):
self._db_path = None
self._lockfile = None
self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache")
self.cache_dir = cache_dir or util.get_cache_dir()
self._db_path = join(self.cache_dir, "db.data")
def __enter__(self):
@@ -154,15 +149,10 @@ class ContentCache(object):
def _lock_dbindex(self):
if not self.cache_dir:
os.makedirs(self.cache_dir)
self._lockfile = LockFile(self.cache_dir)
if self._lockfile.is_locked() and \
isfile(self._lockfile.lock_file) and \
(time() - getmtime(self._lockfile.lock_file)) > 10:
self._lockfile.break_lock()
self._lockfile = lockfile.LockFile(self.cache_dir)
try:
self._lockfile.acquire()
except LockFailed:
except: # pylint: disable=bare-except
return False
return True
@@ -187,11 +177,8 @@ class ContentCache(object):
cache_path = self.get_cache_path(key)
if not isfile(cache_path):
return None
with open(cache_path, "rb") as fp:
data = fp.read()
if data and data[0] in ("{", "["):
return json.loads(data)
return data
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
return fp.read()
def set(self, key, data, valid):
if not get_setting("enable_cache"):
@@ -212,13 +199,17 @@ class ContentCache(object):
if not isdir(dirname(cache_path)):
os.makedirs(dirname(cache_path))
with open(cache_path, "wb") as fp:
if isinstance(data, (dict, list)):
json.dump(data, fp)
else:
fp.write(str(data))
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
try:
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
except UnicodeError:
if isfile(cache_path):
try:
remove(cache_path)
except OSError:
pass
return self._unlock_dbindex()
@@ -235,9 +226,9 @@ class ContentCache(object):
newlines = []
with open(self._db_path) as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
continue
line = line.strip()
expire, path = line.split("=")
if time() < int(expire) and isfile(path) and \
path not in paths_for_delete:
@@ -361,8 +352,8 @@ def get_cid():
pass
cid = str(
uuid.UUID(
bytes=hashlib.md5(str(_uid if _uid else uuid.getnode()))
.digest()))
bytes=hashlib.md5(str(
_uid if _uid else uuid.getnode())).digest()))
if "windows" in util.get_systype() or os.getuid() > 0:
set_state_item("cid", cid)
return cid
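The ContentCache hunks above keep an append-only index where each line is "expire=path"; a minimal sketch of a pruning pass over that file, following the same line format (the helper name and standalone structure are illustrative):

import os
from time import time

def prune_cache_index(db_path):
    # Keep only unexpired entries whose cached files still exist; drop the rest.
    if not os.path.isfile(db_path):
        return
    kept = []
    with open(db_path) as fp:
        for line in fp:
            line = line.strip()
            if "=" not in line:
                continue
            expire, path = line.split("=", 1)
            if time() < int(expire) and os.path.isfile(path):
                kept.append(line)
            elif os.path.isfile(path):
                os.remove(path)  # expired entry: delete the cached payload too
    with open(db_path, "w") as fp:
        fp.write("\n".join(kept) + ("\n" if kept else ""))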

View File

@@ -20,7 +20,7 @@ from os.path import expanduser, join
from time import time
from SCons.Script import (ARGUMENTS, COMMAND_LINE_TARGETS, DEFAULT_TARGETS,
AllowSubstExceptions, AlwaysBuild,
AllowSubstExceptions, AlwaysBuild, Default,
DefaultEnvironment, Variables)
from platformio import util
@@ -54,28 +54,38 @@ commonvars.AddVariables(
# board options
("BOARD",),
# deprecated options, use board_{object.path} instead
("BOARD_MCU",),
("BOARD_F_CPU",),
("BOARD_F_FLASH",),
("BOARD_FLASH_MODE",),
# end of deprecated options
# upload options
("UPLOAD_PORT",),
("UPLOAD_PROTOCOL",),
("UPLOAD_SPEED",),
("UPLOAD_FLAGS",),
("UPLOAD_RESETMETHOD",)
("UPLOAD_RESETMETHOD",),
# test options
("TEST_BUILD_PROJECT_SRC",),
# debug options
("DEBUG_TOOL",),
("DEBUG_SVD_PATH",),
) # yapf: disable
MULTILINE_VARS = [
"EXTRA_SCRIPTS", "PIOFRAMEWORK", "BUILD_FLAGS", "SRC_BUILD_FLAGS",
"BUILD_UNFLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE", "LIB_EXTRA_DIRS"
"BUILD_UNFLAGS", "UPLOAD_FLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE",
"LIB_EXTRA_DIRS"
]
DEFAULT_ENV_OPTIONS = dict(
tools=[
"ar", "as", "gcc", "g++", "gnulink", "platformio", "pioplatform",
"ar", "gas", "gcc", "g++", "gnulink", "platformio", "pioplatform",
"piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
], # yapf: disable
toolpath=[join(util.get_source_dir(), "builder", "tools")],
@@ -91,10 +101,11 @@ DEFAULT_ENV_OPTIONS = dict(
PROJECTSRC_DIR=util.get_projectsrc_dir(),
PROJECTTEST_DIR=util.get_projecttest_dir(),
PROJECTDATA_DIR=util.get_projectdata_dir(),
PROJECTPIOENVS_DIR=util.get_projectpioenvs_dir(),
BUILD_DIR=join("$PROJECTPIOENVS_DIR", "$PIOENV"),
PROJECTBUILD_DIR=util.get_projectbuild_dir(),
BUILD_DIR=join("$PROJECTBUILD_DIR", "$PIOENV"),
BUILDSRC_DIR=join("$BUILD_DIR", "src"),
BUILDTEST_DIR=join("$BUILD_DIR", "test"),
LIBPATH=["$BUILD_DIR"],
LIBSOURCE_DIRS=[
util.get_projectlib_dir(),
util.get_projectlibdeps_dir(),
@@ -124,7 +135,7 @@ if env.GetOption('clean'):
env.PioClean(env.subst("$BUILD_DIR"))
env.Exit(0)
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
print "Verbose mode can be enabled via `-v, --verbose` option"
print("Verbose mode can be enabled via `-v, --verbose` option")
# Handle custom variables from system environment
for var in ("BUILD_FLAGS", "SRC_BUILD_FLAGS", "SRC_FILTER", "EXTRA_SCRIPTS",
@@ -150,35 +161,55 @@ env['LIBSOURCE_DIRS'] = [
env.LoadPioPlatform(commonvars)
env.SConscriptChdir(0)
env.SConsignFile(join("$PROJECTPIOENVS_DIR", ".sconsign.dblite"))
env.SConsignFile(join("$PROJECTBUILD_DIR", ".sconsign.dblite"))
for item in env.GetPreExtraScripts():
for item in env.GetExtraScripts("pre"):
env.SConscript(item, exports="env")
env.SConscript("$BUILD_SCRIPT")
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS + ["size"]))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS + ["size"]))
if "UPLOAD_FLAGS" in env:
env.Append(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
env.Prepend(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
for item in env.GetPostExtraScripts():
for item in env.GetExtraScripts("post"):
env.SConscript(item, exports="env")
##############################################################################
# Checking program size
if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
env.Depends(["upload", "program"], "checkprogsize")
# Replace platform's "size" target with our
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
Default(None)
Default(_new_targets)
Default("checkprogsize")
# Print configured protocols
env.AddPreAction(
["upload", "program"],
env.VerboseAction(lambda source, target, env: env.PrintUploadInfo(),
"Configuring upload protocol..."))
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
##############################################################################
if "envdump" in COMMAND_LINE_TARGETS:
print env.Dump()
print(env.Dump())
env.Exit(0)
if "idedata" in COMMAND_LINE_TARGETS:
try:
print "\n%s\n" % util.path_to_unicode(
json.dumps(env.DumpIDEData(), ensure_ascii=False))
print("\n%s\n" % util.path_to_unicode(
json.dumps(env.DumpIDEData(), ensure_ascii=False)))
env.Exit(0)
except UnicodeDecodeError:
sys.stderr.write(
"\nUnicodeDecodeError: Non-ASCII characters found in build "
"environment\n"
"See explanation in FAQ > Troubleshooting > Building\n"
"http://docs.platformio.org/page/faq.html\n\n")
"https://docs.platformio.org/page/faq.html\n\n")
env.Exit(1)

View File

@@ -16,7 +16,7 @@ from __future__ import absolute_import
from glob import glob
from os import environ
from os.path import join
from os.path import abspath, isfile, join
from SCons.Defaults import processDefines
@@ -53,7 +53,17 @@ def _dump_includes(env):
if unity_dir:
includes.append(unity_dir)
return includes
includes.extend(
[env.subst("$PROJECTINCLUDE_DIR"),
env.subst("$PROJECTSRC_DIR")])
# remove duplicates
result = []
for item in includes:
if item not in result:
result.append(abspath(item))
return result
def _get_gcc_defines(env):
@@ -91,19 +101,41 @@ def _dump_defines(env):
board_mcu = env.BoardConfig().get("build.mcu")
if board_mcu:
defines.append(
str("__AVR_%s__" % board_mcu.upper()
.replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
str("__AVR_%s__" % board_mcu.upper().replace(
"ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
# built-in GCC marcos
if env.GetCompilerType() == "gcc":
defines.extend(_get_gcc_defines(env))
# if env.GetCompilerType() == "gcc":
# defines.extend(_get_gcc_defines(env))
return defines
def _get_svd_path(env):
svd_path = env.subst("$DEBUG_SVD_PATH")
if svd_path:
return abspath(svd_path)
if "BOARD" not in env:
return None
try:
svd_path = env.BoardConfig().get("debug.svd_path")
assert svd_path
except (AssertionError, KeyError):
return None
# custom path to SVD file
if isfile(svd_path):
return svd_path
# default file from ./platform/misc/svd folder
p = env.PioPlatform()
if isfile(join(p.get_dir(), "misc", "svd", svd_path)):
return abspath(join(p.get_dir(), "misc", "svd", svd_path))
return None
def DumpIDEData(env):
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS"
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS"
data = {
"libsource_dirs":
@@ -124,6 +156,12 @@ def DumpIDEData(env):
util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
"prog_path":
env.subst("$PROG_PATH"),
"flash_extra_images": [{
"offset": item[0],
"path": env.subst(item[1])
} for item in env.get("FLASH_EXTRA_IMAGES", [])],
"svd_path":
_get_svd_path(env),
"compiler_type":
env.GetCompilerType()
}
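The _dump_includes change above deduplicates include paths while preserving order; a standalone sketch of that idea (the function name is illustrative):

from os.path import abspath

def dedup_includes(includes):
    # First occurrence wins; normalize to absolute paths as in the hunk above.
    result = []
    for item in includes:
        path = abspath(item)
        if path not in result:
            result.append(path)
    return result

print(dedup_includes(["src", "include", "src"]))  # keeps a single "src" entry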

View File

@@ -19,17 +19,19 @@ from __future__ import absolute_import
import hashlib
import os
import re
import sys
from glob import glob
from os.path import (basename, commonprefix, dirname, isdir, isfile, join,
realpath, sep)
from platform import system
import SCons.Scanner
from SCons.Script import ARGUMENTS, COMMAND_LINE_TARGETS, DefaultEnvironment
from platformio import util
from platformio import exception, util
from platformio.builder.tools import platformio as piotool
from platformio.managers.lib import LibraryManager
from platformio.managers.package import PackageManager
class LibBuilderFactory(object):
@@ -55,15 +57,17 @@ class LibBuilderFactory(object):
@staticmethod
def get_used_frameworks(env, path):
if any([
if any(
isfile(join(path, fname))
for fname in ("library.properties", "keywords.txt")
]):
for fname in ("library.properties", "keywords.txt")):
return ["arduino"]
if isfile(join(path, "module.json")):
return ["mbed"]
include_re = re.compile(
r'^#include\s+(<|")(Arduino|mbed)\.h(<|")', flags=re.MULTILINE)
# check source files
for root, _, files in os.walk(path, followlinks=True):
for fname in files:
@@ -72,23 +76,30 @@ class LibBuilderFactory(object):
continue
with open(join(root, fname)) as f:
content = f.read()
if "Arduino.h" in content:
if "Arduino.h" in content and include_re.search(content):
return ["arduino"]
elif "mbed.h" in content:
elif "mbed.h" in content and include_re.search(content):
return ["mbed"]
return []
class LibBuilderBase(object):
IS_WINDOWS = "windows" in util.get_systype()
LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"]
LDF_MODE_DEFAULT = "chain"
COMPAT_MODES = [0, 1, 2]
COMPAT_MODE_DEFAULT = 1
COMPAT_MODES = ["off", "soft", "strict"]
COMPAT_MODE_DEFAULT = "soft"
CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
ADVANCED_SCANNER = SCons.Scanner.C.CScanner(advanced=True)
CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
# Max depth of nested includes:
# -1 = unlimited
# 0 - disabled nesting
# >0 - number of allowed nested includes
CCONDITIONAL_SCANNER_DEPTH = 99
PARSE_SRC_BY_H_NAME = True
_INCLUDE_DIRS_CACHE = None
@@ -118,7 +129,7 @@ class LibBuilderBase(object):
def __contains__(self, path):
p1 = self.path
p2 = path
if system() == "Windows":
if self.IS_WINDOWS:
p1 = p1.lower()
p2 = p2.lower()
return commonprefix((p1 + sep, p2)) == p1 + sep
@@ -131,6 +142,13 @@ class LibBuilderBase(object):
def version(self):
return self._manifest.get("version")
@property
def vcs_info(self):
items = glob(join(self.path, ".*", PackageManager.SRC_MANIFEST_NAME))
if not items:
return None
return util.load_json(items[0])
@property
def dependencies(self):
return LibraryManager.normalize_dependencies(
@@ -147,7 +165,7 @@ class LibBuilderBase(object):
@property
def include_dir(self):
if not all([isdir(join(self.path, d)) for d in ("include", "src")]):
if not all(isdir(join(self.path, d)) for d in ("include", "src")):
return None
return join(self.path, "include")
@@ -221,25 +239,15 @@ class LibBuilderBase(object):
@staticmethod
def validate_compat_mode(mode):
try:
mode = int(mode)
assert mode in LibBuilderBase.COMPAT_MODES
if isinstance(mode, basestring):
mode = mode.strip().lower()
if mode in LibBuilderBase.COMPAT_MODES:
return mode
except (AssertionError, ValueError):
return LibBuilderBase.COMPAT_MODE_DEFAULT
@staticmethod
def items_to_list(items):
if not isinstance(items, list):
items = [i.strip() for i in items.split(",")]
return [i.lower() for i in items if i]
def items_in_list(self, items, ilist):
items = self.items_to_list(items)
ilist = self.items_to_list(ilist)
if "*" in items or "*" in ilist:
return True
return set(items) & set(ilist)
try:
return LibBuilderBase.COMPAT_MODES[int(mode)]
except (IndexError, ValueError):
pass
return LibBuilderBase.COMPAT_MODE_DEFAULT
def is_platforms_compatible(self, platforms):
return True
@@ -273,11 +281,10 @@ class LibBuilderBase(object):
if env_key not in self.env:
continue
if (key in item and
not self.items_in_list(self.env[env_key], item[key])):
not util.items_in_list(self.env[env_key], item[key])):
if self.verbose:
sys.stderr.write(
"Skip %s incompatible dependency %s\n" % (key[:-1],
item))
sys.stderr.write("Skip %s incompatible dependency %s\n"
% (key[:-1], item))
skip = True
if skip:
continue
@@ -304,9 +311,8 @@ class LibBuilderBase(object):
def get_search_files(self):
items = [
join(self.src_dir, item)
for item in self.env.MatchSourceFiles(self.src_dir,
self.src_filter)
join(self.src_dir, item) for item in self.env.MatchSourceFiles(
self.src_dir, self.src_filter)
]
include_dir = self.include_dir
if include_dir:
@@ -345,35 +351,36 @@ class LibBuilderBase(object):
for path in self._validate_search_files(search_files):
try:
assert "+" in self.lib_ldf_mode
incs = self.env.File(path).get_found_includes(
self.env, LibBuilderBase.ADVANCED_SCANNER,
tuple(include_dirs))
candidates = LibBuilderBase.CCONDITIONAL_SCANNER(
self.env.File(path),
self.env,
tuple(include_dirs),
depth=self.CCONDITIONAL_SCANNER_DEPTH)
except Exception as e: # pylint: disable=broad-except
if self.verbose and "+" in self.lib_ldf_mode:
sys.stderr.write(
"Warning! Classic Pre Processor is used for `%s`, "
"advanced has failed with `%s`\n" % (path, e))
_incs = self.env.File(path).get_found_includes(
self.env, LibBuilderBase.CLASSIC_SCANNER,
tuple(include_dirs))
incs = []
for inc in _incs:
incs.append(inc)
if not self.PARSE_SRC_BY_H_NAME:
candidates = LibBuilderBase.CLASSIC_SCANNER(
self.env.File(path), self.env, tuple(include_dirs))
# print(path, map(lambda n: n.get_abspath(), candidates))
for item in candidates:
if item not in result:
result.append(item)
if not self.PARSE_SRC_BY_H_NAME:
continue
_h_path = item.get_abspath()
if not self.env.IsFileWithExt(_h_path, piotool.SRC_HEADER_EXT):
continue
_f_part = _h_path[:_h_path.rindex(".")]
for ext in piotool.SRC_C_EXT:
if not isfile("%s.%s" % (_f_part, ext)):
continue
_h_path = inc.get_abspath()
if not self.env.IsFileWithExt(_h_path,
piotool.SRC_HEADER_EXT):
continue
_f_part = _h_path[:_h_path.rindex(".")]
for ext in piotool.SRC_C_EXT:
if isfile("%s.%s" % (_f_part, ext)):
incs.append(
self.env.File("%s.%s" % (_f_part, ext)))
# print path, map(lambda n: n.get_abspath(), incs)
for inc in incs:
if inc not in result:
result.append(inc)
_c_path = self.env.File("%s.%s" % (_f_part, ext))
if _c_path not in result:
result.append(_c_path)
return result
def depend_recursive(self, lb, search_files=None):
@@ -390,9 +397,9 @@ class LibBuilderBase(object):
if self != lb:
if _already_depends(lb):
if self.verbose:
sys.stderr.write("Warning! Circular dependencies detected "
"between `%s` and `%s`\n" % (self.path,
lb.path))
sys.stderr.write(
"Warning! Circular dependencies detected "
"between `%s` and `%s`\n" % (self.path, lb.path))
self._circular_deps.append(lb)
elif lb not in self._depbuilders:
self._depbuilders.append(lb)
@@ -429,23 +436,23 @@ class LibBuilderBase(object):
libs.extend(lb.build())
# copy shared information to self env
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
self.env.AppendUnique(**{key: lb.env.get(key)})
self.env.PrependUnique(**{key: lb.env.get(key)})
for lb in self._circular_deps:
self.env.AppendUnique(CPPPATH=lb.get_include_dirs())
self.env.PrependUnique(CPPPATH=lb.get_include_dirs())
if self._is_built:
return libs
self._is_built = True
self.env.AppendUnique(CPPPATH=self.get_include_dirs())
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
if self.lib_ldf_mode == "off":
for lb in self.env.GetLibBuilders():
if self == lb or not lb.is_built:
continue
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
self.env.AppendUnique(**{key: lb.env.get(key)})
self.env.PrependUnique(**{key: lb.env.get(key)})
if self.lib_archive:
libs.append(
@@ -485,18 +492,58 @@ class ArduinoLibBuilder(LibBuilderBase):
@property
def src_filter(self):
if isdir(join(self.path, "src")):
return LibBuilderBase.src_filter.fget(self)
src_dir = join(self.path, "src")
if isdir(src_dir):
src_filter = LibBuilderBase.src_filter.fget(self)
for root, _, files in os.walk(src_dir, followlinks=True):
found = False
for fname in files:
if fname.lower().endswith("asm"):
found = True
break
if not found:
continue
rel_path = root.replace(src_dir, "")
if rel_path.startswith(sep):
rel_path = rel_path[1:] + sep
src_filter.append("-<%s*.[aA][sS][mM]>" % rel_path)
return src_filter
src_filter = []
is_utility = isdir(join(self.path, "utility"))
for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
# arduino ide ignores files with .asm or .ASM extensions
if ext.lower() == "asm":
continue
src_filter.append("+<*.%s>" % ext)
if is_utility:
src_filter.append("+<utility%s*.%s>" % (sep, ext))
return src_filter
def is_frameworks_compatible(self, frameworks):
return self.items_in_list(frameworks, ["arduino", "energia"])
return util.items_in_list(frameworks, ["arduino", "energia"])
def is_platforms_compatible(self, platforms):
platforms_map = {
"avr": "atmelavr",
"sam": "atmelsam",
"samd": "atmelsam",
"esp8266": "espressif8266",
"esp32": "espressif32",
"arc32": "intel_arc32",
"stm32": "ststm32"
}
items = []
for arch in self._manifest.get("architectures", "").split(","):
arch = arch.strip()
if arch == "*":
items = "*"
break
if arch in platforms_map:
items.append(platforms_map[arch])
if not items:
return LibBuilderBase.is_platforms_compatible(self, platforms)
return util.items_in_list(platforms, items)
class MbedLibBuilder(LibBuilderBase):
@@ -522,12 +569,24 @@ class MbedLibBuilder(LibBuilderBase):
include_dirs = LibBuilderBase.get_include_dirs(self)
if self.path not in include_dirs:
include_dirs.append(self.path)
# library with module.json
for p in self._manifest.get("extraIncludes", []):
include_dirs.append(join(self.path, p))
# old mbed library without manifest, add to CPPPATH all folders
if not self._manifest:
for root, _, __ in os.walk(self.path):
part = root.replace(self.path, "").lower()
if any(s in part for s in ("%s." % sep, "test", "example")):
continue
if root not in include_dirs:
include_dirs.append(root)
return include_dirs
def is_frameworks_compatible(self, frameworks):
return self.items_in_list(frameworks, ["mbed"])
return util.items_in_list(frameworks, ["mbed"])
class PlatformIOLibBuilder(LibBuilderBase):
@@ -541,7 +600,7 @@ class PlatformIOLibBuilder(LibBuilderBase):
if "platforms" in manifest:
manifest['platforms'] = [
"espressif8266" if p == "espressif" else p
for p in self.items_to_list(manifest['platforms'])
for p in util.items_to_list(manifest['platforms'])
]
return manifest
@@ -549,15 +608,6 @@ class PlatformIOLibBuilder(LibBuilderBase):
def _is_arduino_manifest(self):
return isfile(join(self.path, "library.properties"))
@property
def src_dir(self):
if all([
"srcFilter" in self._manifest.get("build", {})
or self.env['SRC_FILTER'], not self._is_arduino_manifest()
]):
return self.path
return LibBuilderBase.src_dir.fget(self)
@property
def src_filter(self):
if "srcFilter" in self._manifest.get("build", {}):
@@ -610,13 +660,13 @@ class PlatformIOLibBuilder(LibBuilderBase):
items = self._manifest.get("platforms")
if not items:
return LibBuilderBase.is_platforms_compatible(self, platforms)
return self.items_in_list(platforms, items)
return util.items_in_list(platforms, items)
def is_frameworks_compatible(self, frameworks):
items = self._manifest.get("frameworks")
if not items:
return LibBuilderBase.is_frameworks_compatible(self, frameworks)
return self.items_in_list(frameworks, items)
return util.items_in_list(frameworks, items)
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
@@ -635,6 +685,12 @@ class PlatformIOLibBuilder(LibBuilderBase):
class ProjectAsLibBuilder(LibBuilderBase):
def __init__(self, env, *args, **kwargs):
# backup original value, will be reset in base.__init__
project_src_filter = env.get("SRC_FILTER")
super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs)
self.env['SRC_FILTER'] = project_src_filter
@property
def include_dir(self):
include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
@@ -646,7 +702,9 @@ class ProjectAsLibBuilder(LibBuilderBase):
def get_include_dirs(self):
include_dirs = LibBuilderBase.get_include_dirs(self)
include_dirs.append(self.env.subst("$PROJECTINCLUDE_DIR"))
project_include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
if isdir(project_include_dir):
include_dirs.append(project_include_dir)
return include_dirs
def get_search_files(self):
@@ -655,9 +713,9 @@ class ProjectAsLibBuilder(LibBuilderBase):
# test files
if "__test" in COMMAND_LINE_TARGETS:
items.extend([
join("$PROJECTTEST_DIR", item)
for item in self.env.MatchSourceFiles("$PROJECTTEST_DIR",
"$PIOTEST_SRC_FILTER")
join("$PROJECTTEST_DIR",
item) for item in self.env.MatchSourceFiles(
"$PROJECTTEST_DIR", "$PIOTEST_SRC_FILTER")
])
return items
@@ -671,7 +729,8 @@ class ProjectAsLibBuilder(LibBuilderBase):
@property
def src_filter(self):
return self.env.get("SRC_FILTER", LibBuilderBase.src_filter.fget(self))
return (self.env.get("SRC_FILTER")
or LibBuilderBase.src_filter.fget(self))
def process_extra_options(self):
# skip for project, options are already processed
@@ -713,7 +772,7 @@ class ProjectAsLibBuilder(LibBuilderBase):
def build(self):
self._is_built = True # do not build Project now
self.env.AppendUnique(CPPPATH=self.get_include_dirs())
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
return LibBuilderBase.build(self)
@@ -734,13 +793,13 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
if verbose:
sys.stderr.write("Ignored library %s\n" % lb.path)
return None
if compat_mode > 1 and not lb.is_platforms_compatible(
if compat_mode == "strict" and not lb.is_platforms_compatible(
env['PIOPLATFORM']):
if verbose:
sys.stderr.write(
"Platform incompatible library %s\n" % lb.path)
return False
if compat_mode > 0 and "PIOFRAMEWORK" in env and \
if compat_mode == "soft" and "PIOFRAMEWORK" in env and \
not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", [])):
if verbose:
sys.stderr.write(
@@ -759,7 +818,7 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
try:
lb = LibBuilderFactory.new(
env, join(libs_dir, item), verbose=verbose)
except ValueError:
except exception.InvalidJSONFile:
if verbose:
sys.stderr.write("Skip library with broken manifest: %s\n"
% join(libs_dir, item))
@@ -778,17 +837,16 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
if verbose and found_incompat:
sys.stderr.write(
"More details about \"Library Compatibility Mode\": "
"http://docs.platformio.org/page/librarymanager/ldf.html#"
"https://docs.platformio.org/page/librarymanager/ldf.html#"
"ldf-compat-mode\n")
DefaultEnvironment()['__PIO_LIB_BUILDERS'] = items
return items
def BuildProjectLibraries(env):
lib_builders = env.GetLibBuilders()
def ConfigureProjectLibBuilder(env):
def correct_found_libs():
def correct_found_libs(lib_builders):
# build full dependency graph
found_lbs = [lb for lb in lib_builders if lb.dependent]
for lb in lib_builders:
@@ -803,10 +861,15 @@ def BuildProjectLibraries(env):
margin = "| " * (level)
for lb in root.depbuilders:
title = "<%s>" % lb.name
vcs_info = lb.vcs_info
if lb.version:
title += " v%s" % lb.version
title += " %s" % lb.version
if vcs_info and vcs_info.get("version"):
title += " #%s" % vcs_info.get("version")
sys.stdout.write("%s|-- %s" % (margin, title))
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
if vcs_info:
sys.stdout.write(" [%s]" % vcs_info.get("url"))
sys.stdout.write(" (")
sys.stdout.write(lb.path)
sys.stdout.write(")")
@@ -814,25 +877,29 @@ def BuildProjectLibraries(env):
if lb.depbuilders:
print_deps_tree(lb, level + 1)
print "Collected %d compatible libraries" % len(lib_builders)
print "Looking for dependencies..."
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
project.env = env
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
print("Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
print("LDF MODES: FINDER(%s) COMPATIBILITY(%s)" %
(ldf_mode, project.lib_compat_mode))
lib_builders = env.GetLibBuilders()
print("Collected %d compatible libraries" % len(lib_builders))
print("Scanning dependencies...")
project.search_deps_recursive()
if (LibBuilderBase.validate_ldf_mode(
env.get("LIB_LDF_MODE", LibBuilderBase.LDF_MODE_DEFAULT))
.startswith("chain") and project.depbuilders):
correct_found_libs()
if ldf_mode.startswith("chain") and project.depbuilders:
correct_found_libs(lib_builders)
if project.depbuilders:
print "Library Dependency Graph ( http://bit.ly/configure-pio-ldf )"
print("Dependency Graph")
print_deps_tree(project)
else:
print "No dependencies"
print("No dependencies")
return project.build()
return project
def exists(_):
@@ -841,5 +908,5 @@ def exists(_):
def generate(env):
env.AddMethod(GetLibBuilders)
env.AddMethod(BuildProjectLibraries)
env.AddMethod(ConfigureProjectLibBuilder)
return env
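Several compatibility checks above now call util.items_in_list instead of the removed class helpers; a sketch of those helpers as they appeared before the move (the util versions are assumed to behave the same way):

def items_to_list(items):
    # Accept a list or a comma-separated string; normalize to lowercase.
    if not isinstance(items, list):
        items = [i.strip() for i in items.split(",")]
    return [i.lower() for i in items if i]

def items_in_list(items, ilist):
    # A "*" on either side matches everything; otherwise require an intersection.
    items = items_to_list(items)
    ilist = items_to_list(ilist)
    if "*" in items or "*" in ilist:
        return True
    return bool(set(items) & set(ilist))

print(items_in_list("arduino, energia", ["arduino"]))  # -> True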

View File

@@ -18,7 +18,7 @@ import atexit
import re
import sys
from os import environ, remove, walk
from os.path import basename, isdir, isfile, join, relpath, sep
from os.path import basename, isdir, isfile, join, realpath, relpath, sep
from tempfile import mkstemp
from SCons.Action import Action
@@ -30,9 +30,10 @@ from platformio.managers.core import get_core_package_dir
class InoToCPPConverter(object):
PROTOTYPE_RE = re.compile(r"""^(
PROTOTYPE_RE = re.compile(
r"""^(
(?:template\<.*\>\s*)? # template
([a-z_\d]+\*?\s+){1,2} # return type
([a-z_\d\&]+\*?\s+){1,2} # return type
([a-z_\d]+\s*) # name of prototype
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
)\s*\{ # must end with {
@@ -89,8 +90,8 @@ class InoToCPPConverter(object):
self.env.Execute(
self.env.VerboseAction(
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
out_file,
tmp_path), "Converting " + basename(out_file[:-4])))
out_file, tmp_path),
"Converting " + basename(out_file[:-4])))
atexit.register(_delete_file, tmp_path)
return isfile(out_file)
@@ -163,18 +164,17 @@ class InoToCPPConverter(object):
prototype_names = set([m.group(3).strip() for m in prototypes])
split_pos = prototypes[0].start()
match_ptrs = re.search(self.PROTOPTRS_TPLRE %
("|".join(prototype_names)),
contents[:split_pos], re.M)
match_ptrs = re.search(
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
contents[:split_pos], re.M)
if match_ptrs:
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
result = []
result.append(contents[:split_pos].strip())
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
result.append('#line %d "%s"' %
(self._get_total_lines(contents[:split_pos]),
self._main_ino.replace("\\", "/")))
result.append('#line %d "%s"' % (self._get_total_lines(
contents[:split_pos]), self._main_ino.replace("\\", "/")))
result.append(contents[split_pos:].strip())
return "\n".join(result)
@@ -199,7 +199,7 @@ def _delete_file(path):
pass
@util.memoized
@util.memoized()
def _get_compiler_type(env):
try:
sysenv = environ.copy()
@@ -231,14 +231,25 @@ def GetActualLDScript(env):
return None
script = None
script_in_next = False
for f in env.get("LINKFLAGS", []):
if f.startswith("-Wl,-T"):
script = env.subst(f[6:].replace('"', "").strip())
if isfile(script):
return script
path = _lookup_in_ldpath(script)
if path:
return path
raw_script = None
if f == "-T":
script_in_next = True
continue
elif script_in_next:
script_in_next = False
raw_script = f
elif f.startswith("-Wl,-T"):
raw_script = f[6:]
else:
continue
script = env.subst(raw_script.replace('"', "").strip())
if isfile(script):
return script
path = _lookup_in_ldpath(script)
if path:
return path
if script:
sys.stderr.write(
@@ -263,13 +274,13 @@ def VerboseAction(_, act, actstr):
def PioClean(env, clean_dir):
if not isdir(clean_dir):
print "Build environment is clean"
print("Build environment is clean")
env.Exit(0)
for root, _, files in walk(clean_dir):
for file_ in files:
remove(join(root, file_))
print "Removed %s" % relpath(join(root, file_))
print "Done cleaning"
print("Removed %s" % relpath(join(root, file_)))
print("Done cleaning")
util.rmtree_(clean_dir)
env.Exit(0)
@@ -278,8 +289,13 @@ def ProcessDebug(env):
if not env.subst("$PIODEBUGFLAGS"):
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"])
env.Append(
BUILD_FLAGS=env.get("PIODEBUGFLAGS", []),
BUILD_UNFLAGS=["-Os", "-O0", "-O1", "-O2", "-O3"])
PIODEBUGFLAGS=["-D__PLATFORMIO_DEBUG__"],
BUILD_FLAGS=env.get("PIODEBUGFLAGS", []))
unflags = ["-Os"]
for level in [0, 1, 2]:
for flag in ("O", "g", "ggdb"):
unflags.append("-%s%d" % (flag, level))
env.Append(BUILD_UNFLAGS=unflags)
def ProcessTest(env):
@@ -295,25 +311,18 @@ def ProcessTest(env):
src_filter.append("+<%s%s>" % (env['PIOTEST'], sep))
env.Replace(PIOTEST_SRC_FILTER=src_filter)
return env.CollectBuildFiles(
"$BUILDTEST_DIR",
"$PROJECTTEST_DIR",
"$PIOTEST_SRC_FILTER",
duplicate=False)
def GetPreExtraScripts(env):
return [
item[4:] for item in env.get("EXTRA_SCRIPTS", [])
if item.startswith("pre:")
]
def GetPostExtraScripts(env):
return [
item[5:] if item.startswith("post:") else item
for item in env.get("EXTRA_SCRIPTS", []) if not item.startswith("pre:")
]
def GetExtraScripts(env, scope):
items = []
for item in env.get("EXTRA_SCRIPTS", []):
if scope == "post" and ":" not in item:
items.append(item)
elif item.startswith("%s:" % scope):
items.append(item[len(scope) + 1:])
if not items:
return items
with util.cd(env.subst("$PROJECT_DIR")):
return [realpath(item) for item in items]
def exists(_):
@@ -328,6 +337,5 @@ def generate(env):
env.AddMethod(PioClean)
env.AddMethod(ProcessDebug)
env.AddMethod(ProcessTest)
env.AddMethod(GetPreExtraScripts)
env.AddMethod(GetPostExtraScripts)
env.AddMethod(GetExtraScripts)
return env
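GetExtraScripts above replaces the separate pre/post helpers with a single scope-based filter where unprefixed entries run as post-scripts; a short standalone illustration of that split (entry names are hypothetical):

def split_extra_scripts(entries):
    # Mirrors the scope handling above: "pre:"/"post:" prefixes, default is post.
    pre, post = [], []
    for item in entries:
        if item.startswith("pre:"):
            pre.append(item[len("pre:"):])
        elif item.startswith("post:"):
            post.append(item[len("post:"):])
        else:
            post.append(item)
    return pre, post

print(split_extra_scripts(["pre:apply_patches.py", "rename_firmware.py"]))
# -> (['apply_patches.py'], ['rename_firmware.py'])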

View File

@@ -14,6 +14,7 @@
from __future__ import absolute_import
import base64
import sys
from os.path import isdir, isfile, join
@@ -22,8 +23,10 @@ from SCons.Script import COMMAND_LINE_TARGETS
from platformio import exception, util
from platformio.managers.platform import PlatformFactory
# pylint: disable=too-many-branches, too-many-locals
@util.memoized
@util.memoized()
def initPioPlatform(name):
return PlatformFactory.newPlatform(name)
@@ -41,8 +44,9 @@ def PioPlatform(env):
def BoardConfig(env, board=None):
p = initPioPlatform(env['PLATFORM_MANIFEST'])
try:
assert env.get("BOARD", board), "BoardConfig: Board is not defined"
config = p.board_config(board if board else env.get("BOARD"))
board = board or env.get("BOARD")
assert board, "BoardConfig: Board is not defined"
config = p.board_config(board)
except (AssertionError, exception.UnknownBoard) as e:
sys.stderr.write("Error: %s\n" % str(e))
env.Exit(1)
@@ -65,38 +69,115 @@ def LoadPioPlatform(env, variables):
# Ensure real platform name
env['PIOPLATFORM'] = p.name
# Add toolchains and uploaders to $PATH
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
systype = util.get_systype()
for name in installed_packages:
type_ = p.get_package_type(name)
if type_ not in ("toolchain", "uploader"):
if type_ not in ("toolchain", "uploader", "debugger"):
continue
path = p.get_package_dir(name)
if isdir(join(path, "bin")):
path = join(path, "bin")
env.PrependENVPath("PATH", path)
pkg_dir = p.get_package_dir(name)
env.PrependENVPath(
"PATH",
join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir)
if ("windows" not in systype and isdir(join(pkg_dir, "lib"))
and type_ != "toolchain"):
env.PrependENVPath(
"DYLD_LIBRARY_PATH"
if "darwin" in systype else "LD_LIBRARY_PATH",
join(pkg_dir, "lib"))
# Platform specific LD Scripts
if isdir(join(p.get_dir(), "ldscripts")):
env.Prepend(LIBPATH=[join(p.get_dir(), "ldscripts")])
if "BOARD" not in env:
# handle _MCU and _F_CPU variables for AVR native
for key, value in variables.UnknownVariables().items():
if not key.startswith("BOARD_"):
continue
env.Replace(**{
key.upper().replace("BUILD.", ""):
base64.b64decode(value)
})
return
# update board manifest with a custom data
board_config = env.BoardConfig()
for k in variables.keys():
if k in env or \
not any([k.startswith("BOARD_"), k.startswith("UPLOAD_")]):
for key, value in variables.UnknownVariables().items():
if not key.startswith("BOARD_"):
continue
_opt, _val = k.lower().split("_", 1)
board_config.update(key.lower()[6:], base64.b64decode(value))
# update default environment variables
for key in variables.keys():
if key in env or \
not any([key.startswith("BOARD_"), key.startswith("UPLOAD_")]):
continue
_opt, _val = key.lower().split("_", 1)
if _opt == "board":
_opt = "build"
if _val in board_config.get(_opt):
env.Replace(**{k: board_config.get("%s.%s" % (_opt, _val))})
env.Replace(**{key: board_config.get("%s.%s" % (_opt, _val))})
if "build.ldscript" in board_config:
env.Replace(LDSCRIPT_PATH=board_config.get("build.ldscript"))
def PrintConfiguration(env):
platform = env.PioPlatform()
platform_data = ["PLATFORM: %s >" % platform.title]
hardware_data = ["HARDWARE:"]
configuration_data = ["CONFIGURATION:"]
mcu = env.subst("$BOARD_MCU")
f_cpu = env.subst("$BOARD_F_CPU")
if mcu:
hardware_data.append(mcu.upper())
if f_cpu:
f_cpu = int("".join([c for c in str(f_cpu) if c.isdigit()]))
hardware_data.append("%dMHz" % (f_cpu / 1000000))
debug_tools = None
if "BOARD" in env:
board_config = env.BoardConfig()
platform_data.append(board_config.get("name"))
debug_tools = board_config.get("debug", {}).get("tools")
ram = board_config.get("upload", {}).get("maximum_ram_size")
flash = board_config.get("upload", {}).get("maximum_size")
hardware_data.append(
"%s RAM (%s Flash)" % (util.format_filesize(ram),
util.format_filesize(flash)))
configuration_data.append(
"https://docs.platformio.org/page/boards/%s/%s.html" %
(platform.name, board_config.id))
for data in (configuration_data, platform_data, hardware_data):
if len(data) > 1:
print(" ".join(data))
# Debugging
if not debug_tools:
return
data = [
"CURRENT(%s)" % board_config.get_debug_tool_name(
env.subst("$DEBUG_TOOL"))
]
onboard = []
external = []
for key, value in debug_tools.items():
if value.get("onboard"):
onboard.append(key)
else:
external.append(key)
if onboard:
data.append("ON-BOARD(%s)" % ", ".join(sorted(onboard)))
if external:
data.append("EXTERNAL(%s)" % ", ".join(sorted(external)))
print("DEBUG: %s" % " ".join(data))
def exists(_):
return True
@@ -106,4 +187,5 @@ def generate(env):
env.AddMethod(BoardConfig)
env.AddMethod(GetFrameworkScript)
env.AddMethod(LoadPioPlatform)
env.AddMethod(PrintConfiguration)
return env


@@ -14,18 +14,20 @@
from __future__ import absolute_import
import re
import sys
from fnmatch import fnmatch
from os import environ
from os.path import isfile, join
from platform import system
from shutil import copyfile
from time import sleep
from SCons.Node.Alias import Alias
from SCons.Script import ARGUMENTS
from serial import Serial, SerialException
from platformio import util
from platformio import exception, util
# pylint: disable=unused-argument
def FlushSerialBuffer(env, port):
@@ -41,18 +43,18 @@ def FlushSerialBuffer(env, port):
def TouchSerialPort(env, port, baudrate):
port = env.subst(port)
print "Forcing reset using %dbps open/close on port %s" % (baudrate, port)
print("Forcing reset using %dbps open/close on port %s" % (baudrate, port))
try:
s = Serial(port=port, baudrate=baudrate)
s.setDTR(False)
s.close()
except: # pylint: disable=W0702
except: # pylint: disable=bare-except
pass
sleep(0.4) # DO NOT REMOVE THAT (required by SAM-BA based boards)
def WaitForNewSerialPort(env, before):
print "Waiting for the new upload port..."
print("Waiting for the new upload port...")
prev_port = env.subst("$UPLOAD_PORT")
new_port = None
elapsed = 0
@@ -89,7 +91,7 @@ def WaitForNewSerialPort(env, before):
return new_port
def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
def AutodetectUploadPort(*args, **kwargs):
env = args[0]
def _get_pattern():
@@ -114,10 +116,10 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
mbed_pages = [
join(item['path'], n) for n in ("mbed.htm", "mbed.html")
]
if any([isfile(p) for p in mbed_pages]):
if any(isfile(p) for p in mbed_pages):
return item['path']
if item['name'] \
and any([l in item['name'].lower() for l in msdlabels]):
and any(l in item['name'].lower() for l in msdlabels):
return item['path']
return None
@@ -131,9 +133,12 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
if not _is_match_pattern(item['port']):
continue
port = item['port']
if upload_protocol.startswith("blackmagic") \
and "GDB" in item['description']:
return port
if upload_protocol.startswith("blackmagic"):
if "windows" in util.get_systype() and \
port.startswith("COM") and len(port) > 4:
port = "\\\\.\\%s" % port
if "GDB" in item['description']:
return port
for hwid in board_hwids:
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
if hwid_str in item['hwid']:
@@ -141,26 +146,22 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
return port
if "UPLOAD_PORT" in env and not _get_pattern():
print env.subst("Use manually specified: $UPLOAD_PORT")
print(env.subst("Use manually specified: $UPLOAD_PORT"))
return
if "mbed" in env.subst("$PIOFRAMEWORK") \
and not env.subst("$UPLOAD_PROTOCOL"):
if (env.subst("$UPLOAD_PROTOCOL") == "mbed"
or ("mbed" in env.subst("$PIOFRAMEWORK")
and not env.subst("$UPLOAD_PROTOCOL"))):
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
else:
if (system() == "Linux" and not any([
isfile("/etc/udev/rules.d/99-platformio-udev.rules"),
isfile("/lib/udev/rules.d/99-platformio-udev.rules")
])):
sys.stderr.write(
"\nWarning! Please install `99-platformio-udev.rules` and "
"check that your board's PID and VID are listed in the rules."
"\n https://raw.githubusercontent.com/platformio/platformio"
"/develop/scripts/99-platformio-udev.rules\n")
try:
util.ensure_udev_rules()
except exception.InvalidUdevRules as e:
sys.stderr.write("\n%s\n\n" % e)
env.Replace(UPLOAD_PORT=_look_for_serial_port())
if env.subst("$UPLOAD_PORT"):
print env.subst("Auto-detected: $UPLOAD_PORT")
print(env.subst("Auto-detected: $UPLOAD_PORT"))
else:
sys.stderr.write(
"Error: Please specify `upload_port` for environment or use "
@@ -170,7 +171,7 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
env.Exit(1)
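On Windows, COM ports numbered 10 and above cannot be opened by their bare name, so the Black Magic Probe branch rewrites e.g. `COM12` to `\\.\COM12` before returning the GDB port. A tiny sketch of that rewrite:
```
def normalize_win_com_port(port):
    # COM10 and above must be opened through the \\.\ device namespace on Windows
    if port.startswith("COM") and len(port) > 4:
        return "\\\\.\\%s" % port
    return port

assert normalize_win_com_port("COM3") == "COM3"
assert normalize_win_com_port("COM12") == "\\\\.\\COM12"
```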
def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
def UploadToDisk(_, target, source, env):
assert "UPLOAD_PORT" in env
progname = env.subst("$PROGNAME")
for ext in ("bin", "hex"):
@@ -179,39 +180,106 @@ def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
continue
copyfile(fpath,
join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
print "Firmware has been successfully uploaded.\n"\
"(Some boards may require manual hard reset)"
print("Firmware has been successfully uploaded.\n"
"(Some boards may require manual hard reset)")
def CheckUploadSize(_, target, source, env): # pylint: disable=W0613,W0621
if "BOARD" not in env:
return
max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
if max_size == 0 or "SIZETOOL" not in env:
return
sysenv = environ.copy()
sysenv['PATH'] = str(env['ENV']['PATH'])
cmd = [
env.subst("$SIZETOOL"), "-B",
str(source[0] if isinstance(target[0], Alias) else target[0])
def CheckUploadSize(_, target, source, env):
check_conditions = [
env.get("BOARD"),
env.get("SIZETOOL") or env.get("SIZECHECKCMD")
]
result = util.exec_command(cmd, env=sysenv)
if result['returncode'] != 0:
if not all(check_conditions):
return
program_max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
data_max_size = int(env.BoardConfig().get("upload.maximum_ram_size", 0))
if program_max_size == 0:
return
print result['out'].strip()
line = result['out'].strip().splitlines()[1]
values = [v.strip() for v in line.split("\t")]
used_size = int(values[0]) + int(values[1])
def _configure_defaults():
env.Replace(
SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+")
if used_size > max_size:
def _get_size_output():
cmd = env.get("SIZECHECKCMD")
if not cmd:
return None
if not isinstance(cmd, list):
cmd = cmd.split()
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
sysenv = environ.copy()
sysenv['PATH'] = str(env['ENV']['PATH'])
result = util.exec_command(env.subst(cmd), env=sysenv)
if result['returncode'] != 0:
return None
return result['out'].strip()
def _calculate_size(output, pattern):
if not output or not pattern:
return -1
size = 0
regexp = re.compile(pattern)
for line in output.split("\n"):
line = line.strip()
if not line:
continue
match = regexp.search(line)
if not match:
continue
size += sum(int(value) for value in match.groups())
return size
def _format_availale_bytes(value, total):
percent_raw = float(value) / float(total)
blocks_per_progress = 10
used_blocks = int(round(blocks_per_progress * percent_raw))
if used_blocks > blocks_per_progress:
used_blocks = blocks_per_progress
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
"=" * used_blocks, blocks_per_progress, percent_raw, value, total)
if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
_configure_defaults()
output = _get_size_output()
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))
print("Memory Usage -> http://bit.ly/pio-memory-usage")
if data_max_size and data_size > -1:
print("DATA: %s" % _format_availale_bytes(data_size, data_max_size))
if program_size > -1:
print("PROGRAM: %s" % _format_availale_bytes(program_size,
program_max_size))
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
print(output)
# raise error
# if data_max_size and data_size > data_max_size:
# sys.stderr.write(
# "Error: The data size (%d bytes) is greater "
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
# env.Exit(1)
if program_size > program_max_size:
sys.stderr.write("Error: The program size (%d bytes) is greater "
"than maximum allowed (%s bytes)\n" % (used_size,
max_size))
"than maximum allowed (%s bytes)\n" %
(program_size, program_max_size))
env.Exit(1)
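The rewritten size check no longer parses a fixed `size -B` table; it runs `$SIZECHECKCMD` and sums the capture groups of `SIZEPROGREGEXP`/`SIZEDATAREGEXP` over each matching output line, so platforms can override both the command and the patterns. A standalone sketch applying the default patterns to an invented `size -B -d` output:
```
import re

# Default patterns from the hunk above, applied to an invented `size -B -d` output.
SIZEPROGREGEXP = r"^(\d+)\s+(\d+)\s+\d+\s"
SIZEDATAREGEXP = r"^\d+\s+(\d+)\s+(\d+)\s+\d+"

output = "text\tdata\tbss\tdec\thex\tfilename\n" \
         "2048\t184\t312\t2544\t9f0\tfirmware.elf"

def calculate_size(output, pattern):
    size = 0
    regexp = re.compile(pattern)
    for line in output.split("\n"):
        match = regexp.search(line.strip())
        if match:
            size += sum(int(value) for value in match.groups())
    return size

print(calculate_size(output, SIZEPROGREGEXP))  # 2232 = text + data (flash usage)
print(calculate_size(output, SIZEDATAREGEXP))  # 496  = data + bss  (RAM usage)
```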
def PrintUploadInfo(env):
configured = env.subst("$UPLOAD_PROTOCOL")
available = [configured] if configured else []
if "BOARD" in env:
available.extend(env.BoardConfig().get("upload", {}).get(
"protocols", []))
if available:
print("AVAILABLE: %s" % ", ".join(sorted(set(available))))
if configured:
print("CURRENT: upload_protocol = %s" % configured)
def exists(_):
return True
@@ -223,4 +291,5 @@ def generate(env):
env.AddMethod(AutodetectUploadPort)
env.AddMethod(UploadToDisk)
env.AddMethod(CheckUploadSize)
env.AddMethod(PrintUploadInfo)
return env


@@ -20,10 +20,9 @@ from glob import glob
from os import sep, walk
from os.path import basename, dirname, isdir, join, realpath
from SCons.Action import Action
from SCons import Builder, Util
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild,
DefaultEnvironment, SConscript)
from SCons.Util import case_sensitive_suffixes, is_Sequence
DefaultEnvironment, Export, SConscript)
from platformio.util import glob_escape, pioversion_to_intstr
@@ -31,6 +30,52 @@ SRC_HEADER_EXT = ["h", "hpp"]
SRC_C_EXT = ["c", "cc", "cpp"]
SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-<svn%s>" % sep]
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
def scons_patched_match_splitext(path, suffixes=None):
"""Patch SCons Builder, append $OBJSUFFIX to the end of each target"""
tokens = Util.splitext(path)
if suffixes and tokens[1] and tokens[1] in suffixes:
return (path, tokens[1])
return tokens
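`scons_patched_match_splitext` keeps the source suffix when it is a known one, so object files are named `main.cpp.o` instead of `main.o`, avoiding collisions between sources that share a basename (the naming the Arduino IDE also uses). A minimal illustration, using `os.path.splitext` as a stand-in for `SCons.Util.splitext`:
```
from os.path import splitext  # stand-in for SCons.Util.splitext in this sketch

SRC_SUFFIXES = [".c", ".cc", ".cpp", ".S", ".s"]

def patched_match_splitext(path, suffixes=SRC_SUFFIXES):
    tokens = splitext(path)
    if suffixes and tokens[1] and tokens[1] in suffixes:
        # keep the source suffix so "$OBJSUFFIX" lands after it
        return (path, tokens[1])
    return tokens

print(splitext("main.cpp"))                # ('main', '.cpp')      -> main.o
print(patched_match_splitext("main.cpp"))  # ('main.cpp', '.cpp')  -> main.cpp.o
```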
def _build_project_deps(env):
project_lib_builder = env.ConfigureProjectLibBuilder()
# prepend project libs to the beginning of list
env.Prepend(LIBS=project_lib_builder.build())
# prepend extra linker related options from libs
env.PrependUnique(
**{
key: project_lib_builder.env.get(key)
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
if project_lib_builder.env.get(key)
})
projenv = env.Clone()
# CPPPATH from dependencies
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
# extra build flags from `platformio.ini`
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
is_test = "__test" in COMMAND_LINE_TARGETS
if is_test:
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
"$PIOTEST_SRC_FILTER")
if not is_test or env.get("TEST_BUILD_PROJECT_SRC") == "true":
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
env.get("SRC_FILTER"))
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
sys.stderr.write(
"Error: Nothing to build. Please put your source code files "
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
env.Exit(1)
Export("projenv")
def BuildProgram(env):
@@ -42,15 +87,19 @@ def BuildProgram(env):
_append_pio_macros()
# fix ASM handling under non-casitive OS
if not case_sensitive_suffixes(".s", ".S"):
env.PrintConfiguration()
# fix ASM handling under non case-sensitive OS
if not Util.case_sensitive_suffixes(".s", ".S"):
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
if set(["__debug", "debug"]) & set(COMMAND_LINE_TARGETS):
env.ProcessDebug()
# process extra flags from board
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
# remove base flags
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
# apply user flags
env.ProcessFlags(env.get("BUILD_FLAGS"))
@@ -60,83 +109,84 @@ def BuildProgram(env):
# restore PIO macros if it was deleted by framework
_append_pio_macros()
# Search for project source files
env.Append(
LIBPATH=["$BUILD_DIR"],
PIOBUILDFILES=env.CollectBuildFiles(
"$BUILDSRC_DIR", "$PROJECTSRC_DIR", "$SRC_FILTER",
duplicate=False))
# remove specified flags
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
if "__debug" in COMMAND_LINE_TARGETS:
env.ProcessDebug()
if "__test" in COMMAND_LINE_TARGETS:
env.Append(PIOBUILDFILES=env.ProcessTest())
env.ProcessTest()
# build dependent libs
env.Append(LIBS=env.BuildProjectLibraries())
# build project with dependencies
_build_project_deps(env)
# append specified LD_SCRIPT
if ("LDSCRIPT_PATH" in env
and not any(["-Wl,-T" in f for f in env['LINKFLAGS']])):
env.Append(LINKFLAGS=['-Wl,-T"$LDSCRIPT_PATH"'])
# append into the beginning a main LD script
if (env.get("LDSCRIPT_PATH")
and not any("-Wl,-T" in f for f in env['LINKFLAGS'])):
env.Prepend(LINKFLAGS=["-T", "$LDSCRIPT_PATH"])
# enable "cyclic reference" for linker
if env.get("LIBS") and env.GetCompilerType() == "gcc":
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
env.Append(_LIBFLAGS=" -Wl,--end-group")
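`-Wl,--start-group`/`-Wl,--end-group` makes the GNU linker rescan the enclosed archives until no new undefined symbols are resolved, which tolerates circular dependencies between static libraries. The resulting link line looks roughly like this (toolchain and library names are hypothetical):
```
<toolchain>-gcc ... -o firmware.elf <objects> -Wl,--start-group -lFoo -lBar -lm -Wl,--end-group
```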
# Handle SRC_BUILD_FLAGS
env.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
sys.stderr.write(
"Error: Nothing to build. Please put your source code files "
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
env.Exit(1)
program = env.Program(
join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES'])
env.Replace(PIOMAINPROG=program)
checksize_action = Action(env.CheckUploadSize, "Checking program size")
AlwaysBuild(env.Alias("checkprogsize", program, checksize_action))
if set(["upload", "program"]) & set(COMMAND_LINE_TARGETS):
env.AddPostAction(program, checksize_action)
AlwaysBuild(
env.Alias(
"checkprogsize", program,
env.VerboseAction(env.CheckUploadSize,
"Checking size $PIOMAINPROG")))
return program
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
if not isinstance(flags, list):
flags = [flags]
result = {}
for raw in flags:
for key, value in env.ParseFlags(str(raw)).items():
if key not in result:
result[key] = []
result[key].extend(value)
cppdefines = []
for item in result['CPPDEFINES']:
if not Util.is_Sequence(item):
cppdefines.append(item)
continue
name, value = item[:2]
if '\"' in value:
value = value.replace('\"', '\\\"')
elif value.isdigit():
value = int(value)
elif value.replace(".", "", 1).isdigit():
value = float(value)
cppdefines.append((name, value))
result['CPPDEFINES'] = cppdefines
# fix relative CPPPATH & LIBPATH
for k in ("CPPPATH", "LIBPATH"):
for i, p in enumerate(result.get(k, [])):
if isdir(p):
result[k][i] = realpath(p)
# fix relative path for "-include"
for i, f in enumerate(result.get("CCFLAGS", [])):
if isinstance(f, tuple) and f[0] == "-include":
result['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
return result
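`ParseFlagsExtended` post-processes `CPPDEFINES`: quoted values get their quotes escaped, purely numeric values are coerced to `int` or `float`, and relative `CPPPATH`/`LIBPATH` entries are made absolute. A standalone sketch of just the value coercion (the sample defines are hypothetical):
```
def coerce_define(item):
    # item is a bare name or a (name, value) pair, as SCons ParseFlags yields them
    if not isinstance(item, (list, tuple)):
        return item
    name, value = item[:2]
    if '"' in value:
        value = value.replace('"', '\\"')
    elif value.isdigit():
        value = int(value)
    elif value.replace(".", "", 1).isdigit():
        value = float(value)
    return (name, value)

print(coerce_define("DEBUG"))                # DEBUG
print(coerce_define(("F_CPU", "16000000")))  # ('F_CPU', 16000000)
print(coerce_define(("VERSION", "1.2")))     # ('VERSION', 1.2)
```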
def ProcessFlags(env, flags): # pylint: disable=too-many-branches
if not flags:
return
if isinstance(flags, list):
flags = " ".join(flags)
parsed_flags = env.ParseFlags(str(flags))
for flag in parsed_flags.pop("CPPDEFINES"):
if not is_Sequence(flag):
env.Append(CPPDEFINES=flag)
continue
_key, _value = flag[:2]
if '\"' in _value:
_value = _value.replace('\"', '\\\"')
elif _value.isdigit():
_value = int(_value)
elif _value.replace(".", "", 1).isdigit():
_value = float(_value)
env.Append(CPPDEFINES=(_key, _value))
env.Append(**parsed_flags)
# fix relative CPPPATH & LIBPATH
for k in ("CPPPATH", "LIBPATH"):
for i, p in enumerate(env.get(k, [])):
if isdir(p):
env[k][i] = realpath(p)
# fix relative path for "-include"
for i, f in enumerate(env.get("CCFLAGS", [])):
if isinstance(f, tuple) and f[0] == "-include":
env['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
env.Append(**env.ParseFlagsExtended(flags))
# Cancel any previous definition of name, either built in or
# provided with a -D option // Issue #191
# provided with a -U option // Issue #191
undefines = [
u for u in env.get("CCFLAGS", [])
if isinstance(u, basestring) and u.startswith("-U")
@@ -150,19 +200,27 @@ def ProcessFlags(env, flags): # pylint: disable=too-many-branches
def ProcessUnFlags(env, flags):
if not flags:
return
if isinstance(flags, list):
flags = " ".join(flags)
parsed_flags = env.ParseFlags(str(flags))
all_flags = []
for items in parsed_flags.values():
all_flags.extend(items)
all_flags = set(all_flags)
parsed = env.ParseFlagsExtended(flags)
for key in parsed_flags:
cur_flags = set(env.Flatten(env.get(key, [])))
for item in cur_flags & all_flags:
while item in env[key]:
env[key].remove(item)
# get all flags and copy them to each "*FLAGS" variable
all_flags = []
for key, unflags in parsed.items():
if key.endswith("FLAGS"):
all_flags.extend(unflags)
for key, unflags in parsed.items():
if key.endswith("FLAGS"):
parsed[key].extend(all_flags)
for key, unflags in parsed.items():
for unflag in unflags:
for current in env.get(key, []):
conditions = [
unflag == current,
isinstance(current, (tuple, list))
and unflag[0] == current[0]
]
if any(conditions):
env[key].remove(current)
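`ProcessUnFlags` now parses `build_unflags` with the same flag parser, copies every parsed flag into all `*FLAGS` keys, and removes each matching current flag, including tuple-valued flags where only the first element has to match. A simplified sketch of the removal step with a plain dict standing in for the SCons environment (flag values are hypothetical):
```
# Simplified sketch of the un-flag removal above; `env` is a plain dict here.
env = {"CCFLAGS": ["-Os", "-g", ("-include", "config.h")]}
unflags = {"CCFLAGS": ["-Os", ("-include",)]}

for key, items in unflags.items():
    for unflag in items:
        for current in list(env.get(key, [])):
            conditions = [
                unflag == current,
                isinstance(current, (tuple, list)) and unflag[0] == current[0],
            ]
            if any(conditions):
                env[key].remove(current)

print(env["CCFLAGS"])  # ['-g']
```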
def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
@@ -176,8 +234,6 @@ def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
def MatchSourceFiles(env, src_dir, src_filter=None):
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
def _append_build_item(items, item, src_dir):
if env.IsFileWithExt(item, SRC_BUILD_EXT + SRC_HEADER_EXT):
items.add(item.replace(src_dir + sep, ""))
@@ -254,10 +310,13 @@ def BuildFrameworks(env, frameworks):
for f in frameworks:
if f in ("arduino", "energia"):
env.ConvertInoToCpp()
# Arduino IDE appends .o to the end of the filename
Builder.match_splitext = scons_patched_match_splitext
if "nobuild" not in COMMAND_LINE_TARGETS:
env.ConvertInoToCpp()
if f in board_frameworks:
SConscript(env.GetFrameworkScript(f))
SConscript(env.GetFrameworkScript(f), exports="env")
else:
sys.stderr.write(
"Error: This board doesn't support %s framework!\n" % f)
@@ -265,15 +324,16 @@ def BuildFrameworks(env, frameworks):
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
lib = env.Clone()
return lib.StaticLibrary(
lib.subst(variant_dir),
lib.CollectBuildFiles(variant_dir, src_dir, src_filter))
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
return env.StaticLibrary(
env.subst(variant_dir),
env.CollectBuildFiles(variant_dir, src_dir, src_filter))
def BuildSources(env, variant_dir, src_dir, src_filter=None):
DefaultEnvironment().Append(PIOBUILDFILES=env.Clone().CollectBuildFiles(
variant_dir, src_dir, src_filter))
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
DefaultEnvironment().Append(
PIOBUILDFILES=[env.Object(node) for node in nodes])
def exists(_):
@@ -282,6 +342,7 @@ def exists(_):
def generate(env):
env.AddMethod(BuildProgram)
env.AddMethod(ParseFlagsExtended)
env.AddMethod(ProcessFlags)
env.AddMethod(ProcessUnFlags)
env.AddMethod(IsFileWithExt)


@@ -16,6 +16,7 @@ import json
import click
from platformio import util
from platformio.managers.platform import PlatformManager
@@ -60,22 +61,13 @@ def print_boards(boards):
click.echo("-" * terminal_width)
for board in boards:
ram_size = board['ram']
if ram_size >= 1024:
if ram_size % 1024:
ram_size = "%.1fkB" % (ram_size / 1024.0)
else:
ram_size = "%dkB" % (ram_size / 1024)
else:
ram_size = "%dB" % ram_size
click.echo(
BOARDLIST_TPL.format(
type=click.style(board['id'], fg="cyan"),
mcu=board['mcu'],
frequency="%dMhz" % (board['fcpu'] / 1000000),
flash="%dkB" % (board['rom'] / 1024),
ram=ram_size,
frequency="%dMHz" % (board['fcpu'] / 1000000),
flash=util.format_filesize(board['rom']),
ram=util.format_filesize(board['ram']),
name=board['name']))


@@ -73,7 +73,7 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
@click.option("-O", "--project-option", multiple=True)
@click.option("-v", "--verbose", is_flag=True)
@click.pass_context
def cli( # pylint: disable=too-many-arguments
def cli( # pylint: disable=too-many-arguments, too-many-branches
ctx, src, lib, exclude, board, build_dir, keep_build_dir, project_conf,
project_option, verbose):
@@ -84,9 +84,13 @@ def cli( # pylint: disable=too-many-arguments
try:
app.set_session_var("force_option", True)
_clean_dir(build_dir)
for dir_name, patterns in dict(lib=lib, src=src).iteritems():
if not keep_build_dir and isdir(build_dir):
util.rmtree_(build_dir)
if not isdir(build_dir):
makedirs(build_dir)
for dir_name, patterns in dict(lib=lib, src=src).items():
if not patterns:
continue
contents = []
@@ -116,11 +120,6 @@ def cli( # pylint: disable=too-many-arguments
util.rmtree_(build_dir)
def _clean_dir(dirpath):
util.rmtree_(dirpath)
makedirs(dirpath)
def _copy_contents(dst_dir, contents):
items = {"dirs": set(), "files": set()}


@@ -55,7 +55,7 @@ def device_list( # pylint: disable=too-many-branches
"mdns": "Multicast DNS Services"
}
for key, value in data.iteritems():
for key, value in data.items():
if not single_key:
click.secho(titles[key], bold=True)
click.echo("=" * len(titles[key]))
@@ -85,7 +85,7 @@ def device_list( # pylint: disable=too-many-branches
if item['properties']:
click.echo("Properties: %s" % ("; ".join([
"%s=%s" % (k, v)
for k, v in item['properties'].iteritems()
for k, v in item['properties'].items()
])))
click.echo("")
@@ -165,8 +165,10 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
kwargs['environment'])
monitor_options = {k: v for k, v in project_options or []}
if monitor_options:
for k in ("port", "baud", "rts", "dtr"):
for k in ("port", "baud", "speed", "rts", "dtr"):
k2 = "monitor_%s" % k
if k == "speed":
k = "baud"
if kwargs[k] is None and k2 in monitor_options:
kwargs[k] = monitor_options[k2]
if k != "port":
@@ -180,7 +182,7 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
kwargs['port'] = ports[0]['port']
sys.argv = ["monitor"]
for k, v in kwargs.iteritems():
for k, v in kwargs.items():
if k in ("port", "baud", "rts", "dtr", "environment", "project_dir"):
continue
k = "--" + k.replace("_", "-")


@@ -15,7 +15,6 @@
import sys
import click
import requests
from platformio.managers.core import pioplus_call
@@ -30,13 +29,3 @@ from platformio.managers.core import pioplus_call
@click.option("--no-open", is_flag=True)
def cli(*args, **kwargs): # pylint: disable=unused-argument
pioplus_call(sys.argv[1:])
def shutdown_servers():
port = 8010
while port < 9000:
try:
requests.get("http://127.0.0.1:%d?__shutdown__=1" % port)
port += 1
except: # pylint: disable=bare-except
return


@@ -73,22 +73,21 @@ def cli(
click.secho(
"\nThe current working directory", fg="yellow", nl=False)
click.secho(" %s " % project_dir, fg="cyan", nl=False)
click.secho(
"will be used for project.\n"
"You can specify another project directory via\n"
"`platformio init -d %PATH_TO_THE_PROJECT_DIR%` command.",
fg="yellow")
click.secho("will be used for the project.", fg="yellow")
click.echo("")
click.echo("The next files/directories have been created in %s" %
click.style(project_dir, fg="cyan"))
click.echo("%s - Project Configuration File" % click.style(
"platformio.ini", fg="cyan"))
click.echo(
"%s - Put your source files here" % click.style("src", fg="cyan"))
click.echo("%s - Put project header files here" % click.style(
"include", fg="cyan"))
click.echo("%s - Put here project specific (private) libraries" %
click.style("lib", fg="cyan"))
click.echo("%s - Put project source files here" % click.style(
"src", fg="cyan"))
click.echo("%s - Project Configuration File" % click.style(
"platformio.ini", fg="cyan"))
is_new_project = not util.is_platformio_project(project_dir)
init_base_project(project_dir)
if board:
@@ -102,16 +101,28 @@ def cli(
pg = ProjectGenerator(project_dir, ide, env_name)
pg.generate()
if not silent:
if is_new_project:
init_ci_conf(project_dir)
init_cvs_ignore(project_dir)
if silent:
return
if ide:
click.secho(
"\nProject has been successfully initialized!\nUseful commands:\n"
"`platformio run` - process/build project from the current "
"directory\n"
"`platformio run --target upload` or `platformio run -t upload` "
"- upload firmware to embedded board\n"
"`platformio run --target clean` - clean project (remove compiled "
"files)\n"
"`platformio run --help` - additional information",
"\nProject has been successfully %s including configuration files "
"for `%s` IDE." % ("initialized" if is_new_project else "updated",
ide),
fg="green")
else:
click.secho(
"\nProject has been successfully %s! Useful commands:\n"
"`pio run` - process/build project from the current directory\n"
"`pio run --target upload` or `pio run -t upload` "
"- upload firmware to a target\n"
"`pio run --target clean` - clean project (remove compiled files)"
"\n`pio run --help` - additional information" %
("initialized" if is_new_project else "updated"),
fg="green")
@@ -134,76 +145,147 @@ def get_best_envname(project_dir, boards=None):
def init_base_project(project_dir):
if not util.is_platformio_project(project_dir):
copyfile(
join(util.get_source_dir(), "projectconftpl.ini"),
join(project_dir, "platformio.ini"))
if util.is_platformio_project(project_dir):
return
lib_dir = join(project_dir, "lib")
src_dir = join(project_dir, "src")
config = util.load_project_config(project_dir)
if config.has_option("platformio", "src_dir"):
src_dir = join(project_dir, config.get("platformio", "src_dir"))
copyfile(
join(util.get_source_dir(), "projectconftpl.ini"),
join(project_dir, "platformio.ini"))
for d in (src_dir, lib_dir):
if not isdir(d):
makedirs(d)
with util.cd(project_dir):
dir_to_readme = [
(util.get_projectsrc_dir(), None),
(util.get_projectinclude_dir(), init_include_readme),
(util.get_projectlib_dir(), init_lib_readme),
(util.get_projecttest_dir(), init_test_readme),
]
for (path, cb) in dir_to_readme:
if isdir(path):
continue
makedirs(path)
if cb:
cb(path)
init_lib_readme(lib_dir)
init_ci_conf(project_dir)
init_cvs_ignore(project_dir)
def init_include_readme(include_dir):
with open(join(include_dir, "README"), "w") as f:
f.write("""
This directory is intended for project header files.
A header file is a file containing C declarations and macro definitions
to be shared between several project source files. You request the use of a
header file in your project source file (C, C++, etc) located in `src` folder
by including it, with the C preprocessing directive `#include'.
```src/main.c
#include "header.h"
int main (void)
{
...
}
```
Including a header file produces the same results as copying the header file
into each source file that needs it. Such copying would be time-consuming
and error-prone. With a header file, the related declarations appear
in only one place. If they need to be changed, they can be changed in one
place, and programs that include the header file will automatically use the
new version when next recompiled. The header file eliminates the labor of
finding and changing all the copies as well as the risk that a failure to
find one copy will result in inconsistencies within a program.
In C, the usual convention is to give header files names that end with `.h'.
It is most portable to use only letters, digits, dashes, and underscores in
header file names, and at most one dot.
Read more about using header files in official GCC documentation:
* Include Syntax
* Include Operation
* Once-Only Headers
* Computed Includes
https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
""")
def init_lib_readme(lib_dir):
if isfile(join(lib_dir, "readme.txt")):
return
with open(join(lib_dir, "readme.txt"), "w") as f:
with open(join(lib_dir, "README"), "w") as f:
f.write("""
This directory is intended for the project specific (private) libraries.
PlatformIO will compile them to static libraries and link to executable file.
This directory is intended for project specific (private) libraries.
PlatformIO will compile them to static libraries and link into executable file.
The source code of each library should be placed in separate directory, like
"lib/private_lib/[here are source files]".
The source code of each library should be placed in its own separate directory
("lib/your_library_name/[here are source files]").
For example, see how can be organized `Foo` and `Bar` libraries:
For example, see a structure of the following two libraries `Foo` and `Bar`:
|--lib
| |
| |--Bar
| | |--docs
| | |--examples
| | |--src
| | |- Bar.c
| | |- Bar.h
| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
| |
| |--Foo
| | |- Foo.c
| | |- Foo.h
| |- readme.txt --> THIS FILE
| |
| |- README --> THIS FILE
|
|- platformio.ini
|--src
|- main.c
Then in `src/main.c` you should use:
and a contents of `src/main.c`:
```
#include <Foo.h>
#include <Bar.h>
// rest H/C/CPP code
int main (void)
{
...
}
PlatformIO will find your libraries automatically, configure preprocessor's
include paths and build them.
```
PlatformIO Library Dependency Finder will find automatically dependent
libraries scanning project source files.
More information about PlatformIO Library Dependency Finder
- http://docs.platformio.org/page/librarymanager/ldf.html
- https://docs.platformio.org/page/librarymanager/ldf.html
""")
def init_test_readme(test_dir):
with open(join(test_dir, "README"), "w") as f:
f.write("""
This directory is intended for PIO Unit Testing and project tests.
Unit Testing is a software testing method by which individual units of
source code, sets of one or more MCU program modules together with associated
control data, usage procedures, and operating procedures, are tested to
determine whether they are fit for use. Unit testing finds problems early
in the development cycle.
More information about PIO Unit Testing:
- https://docs.platformio.org/page/plus/unit-testing.html
""")
def init_ci_conf(project_dir):
if isfile(join(project_dir, ".travis.yml")):
conf_path = join(project_dir, ".travis.yml")
if isfile(conf_path):
return
with open(join(project_dir, ".travis.yml"), "w") as f:
with open(conf_path, "w") as f:
f.write("""# Continuous Integration (CI) is the practice, in software
# engineering, of merging all developer working copies with a shared mainline
# several times a day < http://docs.platformio.org/page/ci/index.html >
# several times a day < https://docs.platformio.org/page/ci/index.html >
#
# Documentation:
#
@@ -211,13 +293,13 @@ def init_ci_conf(project_dir):
# < https://docs.travis-ci.com/user/integration/platformio/ >
#
# * PlatformIO integration with Travis CI
# < http://docs.platformio.org/page/ci/travis.html >
# < https://docs.platformio.org/page/ci/travis.html >
#
# * User Guide for `platformio ci` command
# < http://docs.platformio.org/page/userguide/cmd_ci.html >
# < https://docs.platformio.org/page/userguide/cmd_ci.html >
#
#
# Please choice one of the following templates (proposed below) and uncomment
# Please choose one of the following templates (proposed below) and uncomment
# it (remove "# " before each line) or use own configuration according to the
# Travis CI documentation (see above).
#
@@ -231,21 +313,32 @@ def init_ci_conf(project_dir):
# python:
# - "2.7"
#
# sudo: false
# cache:
# directories:
# - "~/.platformio"
#
# install:
# - pip install -U platformio
# - platformio update
#
# script:
# - platformio run
#
# Template #2: The project is intended to by used as a library with examples
# Template #2: The project is intended to be used as a library with examples.
#
# language: python
# python:
# - "2.7"
#
# sudo: false
# cache:
# directories:
# - "~/.platformio"
#
# env:
# - PLATFORMIO_CI_SRC=path/to/test/file.c
# - PLATFORMIO_CI_SRC=examples/file.ino
@@ -253,6 +346,7 @@ def init_ci_conf(project_dir):
#
# install:
# - pip install -U platformio
# - platformio update
#
# script:
# - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N
@@ -260,23 +354,11 @@ def init_ci_conf(project_dir):
def init_cvs_ignore(project_dir):
ignore_path = join(project_dir, ".gitignore")
default = [".pioenvs\n", ".piolibdeps\n"]
current = []
modified = False
if isfile(ignore_path):
with open(ignore_path) as fp:
current = fp.readlines()
if current and not current[-1].endswith("\n"):
current[-1] += "\n"
for d in default:
if d not in current:
modified = True
current.append(d)
if not modified:
conf_path = join(project_dir, ".gitignore")
if isfile(conf_path):
return
with open(ignore_path, "w") as fp:
fp.writelines(current)
with open(conf_path, "w") as fp:
fp.writelines([".pio\n", ".pioenvs\n", ".piolibdeps\n"])
def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,


@@ -15,16 +15,14 @@
# pylint: disable=too-many-branches, too-many-locals
import json
import time
from os.path import isdir, join
from time import sleep
from urllib import quote
import arrow
import click
from platformio import exception, util
from platformio.managers.lib import LibraryManager
from platformio.managers.platform import PlatformFactory, PlatformManager
from platformio.managers.lib import LibraryManager, get_builtin_libs
from platformio.util import get_api_result
@@ -99,7 +97,7 @@ def cli(ctx, **options):
help="Reinstall/redownload library if exists")
@click.pass_obj
def lib_install(lm, libraries, silent, interactive, force):
# @TODO "save" option
# @TODO: "save" option
for library in libraries:
lm.install(
library, silent=silent, interactive=interactive, force=force)
@@ -188,6 +186,7 @@ def print_lib_item(item):
@click.argument("query", required=False, nargs=-1)
@click.option("--json-output", is_flag=True)
@click.option("--page", type=click.INT, default=1)
@click.option("--id", multiple=True)
@click.option("-n", "--name", multiple=True)
@click.option("-a", "--author", multiple=True)
@click.option("-k", "--keyword", multiple=True)
@@ -204,7 +203,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
if not isinstance(query, list):
query = list(query)
for key, values in filters.iteritems():
for key, values in filters.items():
for value in values:
query.append('%s:"%s"' % (key, value))
@@ -229,7 +228,7 @@ def lib_search(query, json_output, page, noninteractive, **filters):
click.echo("For more examples and advanced search syntax, "
"please use documentation:")
click.secho(
"http://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
"https://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
fg="cyan")
return
@@ -252,13 +251,14 @@ def lib_search(query, json_output, page, noninteractive, **filters):
result['perpage'],
fg="yellow")
click.echo()
sleep(5)
time.sleep(5)
elif not click.confirm("Show next libraries?"):
break
result = get_api_result(
"/v2/lib/search",
{"query": " ".join(query),
"page": int(result['page']) + 1},
"/v2/lib/search", {
"query": " ".join(query),
"page": int(result['page']) + 1
},
cache_valid="1d")
@@ -280,25 +280,6 @@ def lib_list(lm, json_output):
return True
@util.memoized
def get_builtin_libs(storage_names=None):
items = []
storage_names = storage_names or []
pm = PlatformManager()
for manifest in pm.get_installed():
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
for storage in p.get_lib_storages():
if storage_names and storage['name'] not in storage_names:
continue
lm = LibraryManager(storage['path'])
items.append({
"name": storage['name'],
"path": storage['path'],
"items": lm.get_installed()
})
return items
@cli.command("builtin", short_help="List built-in libraries")
@click.option("--storage", multiple=True)
@click.option("--json-output", is_flag=True)
@@ -326,8 +307,12 @@ def lib_builtin(storage, json_output):
def lib_show(library, json_output):
lm = LibraryManager()
name, requirements, _ = lm.parse_pkg_uri(library)
lib_id = lm.get_pkg_id_by_name(
name, requirements, silent=json_output, interactive=not json_output)
lib_id = lm.search_lib_id({
"name": name,
"requirements": requirements
},
silent=json_output,
interactive=not json_output)
lib = get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
if json_output:
return click.echo(json.dumps(lib))
@@ -338,9 +323,10 @@ def lib_show(library, json_output):
click.echo(lib['description'])
click.echo()
click.echo("Version: %s, released %s" %
(lib['version']['name'],
arrow.get(lib['version']['released']).humanize()))
click.echo(
"Version: %s, released %s" %
(lib['version']['name'],
time.strftime("%c", util.parse_date(lib['version']['released']))))
click.echo("Manifest: %s" % lib['confurl'])
for key in ("homepage", "repository", "license"):
if key not in lib or not lib[key]:
@@ -376,7 +362,8 @@ def lib_show(library, json_output):
blocks.append(("Headers", lib['headers']))
blocks.append(("Examples", lib['examples']))
blocks.append(("Versions", [
"%s, released %s" % (v['name'], arrow.get(v['released']).humanize())
"%s, released %s" %
(v['name'], time.strftime("%c", util.parse_date(v['released'])))
for v in lib['versions']
]))
blocks.append(("Unique Downloads", [
@@ -435,24 +422,24 @@ def lib_stats(json_output):
click.echo("-" * terminal_width)
def _print_lib_item(item):
click.echo((printitemdate_tpl
if "date" in item else printitem_tpl).format(
name=click.style(item['name'], fg="cyan"),
date=str(
arrow.get(item['date']).humanize()
if "date" in item else ""),
url=click.style(
"http://platformio.org/lib/show/%s/%s" %
(item['id'], quote(item['name'])),
fg="blue")))
date = str(
time.strftime("%c", util.parse_date(item['date'])) if "date" in
item else "")
url = click.style(
"https://platformio.org/lib/show/%s/%s" % (item['id'],
quote(item['name'])),
fg="blue")
click.echo(
(printitemdate_tpl if "date" in item else printitem_tpl).format(
name=click.style(item['name'], fg="cyan"), date=date, url=url))
def _print_tag_item(name):
click.echo(
printitem_tpl.format(
name=click.style(name, fg="cyan"),
url=click.style(
"http://platformio.org/lib/search?query=" +
quote("keyword:%s" % name),
"https://platformio.org/lib/search?query=" + quote(
"keyword:%s" % name),
fg="blue")))
for key in ("updated", "added"):


@@ -85,6 +85,7 @@ def _get_installed_platform_data(platform,
homepage=p.homepage,
repository=p.repository_url,
url=p.vendor_url,
docs=p.docs_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(p.frameworks.keys() if p.frameworks else []),
@@ -195,7 +196,7 @@ def platform_frameworks(query, json_output):
if query and query.lower() not in search_data.lower():
continue
framework['homepage'] = (
"http://platformio.org/frameworks/" + framework['name'])
"https://platformio.org/frameworks/" + framework['name'])
framework['platforms'] = [
platform['name'] for platform in _get_registry_platforms()
if framework['name'] in platform['frameworks']
@@ -364,8 +365,8 @@ def platform_update(platforms, only_packages, only_check, json_output):
if not pkg_dir:
continue
latest = pm.outdated(pkg_dir, requirements)
if (not latest and not PlatformFactory.newPlatform(pkg_dir)
.are_outdated_packages()):
if (not latest and not PlatformFactory.newPlatform(
pkg_dir).are_outdated_packages()):
continue
data = _get_installed_platform_data(
pkg_dir, with_boards=False, expose_packages=False)


@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from hashlib import sha1
from os import getcwd, makedirs, walk
from os.path import getmtime, isdir, isfile, join
@@ -23,10 +22,9 @@ import click
from platformio import __version__, exception, telemetry, util
from platformio.commands.device import device_monitor as cmd_device_monitor
from platformio.commands.lib import lib_install as cmd_lib_install
from platformio.commands.lib import get_builtin_libs
from platformio.commands.platform import \
platform_install as cmd_platform_install
from platformio.managers.lib import LibraryManager
from platformio.managers.lib import LibraryManager, is_builtin_lib
from platformio.managers.platform import PlatformFactory
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
@@ -60,26 +58,26 @@ def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
raise exception.NotPlatformIOProject(project_dir)
with util.cd(project_dir):
# clean obsolete .pioenvs dir
# clean obsolete build dir
if not disable_auto_clean:
try:
_clean_pioenvs_dir(util.get_projectpioenvs_dir())
_clean_build_dir(util.get_projectbuild_dir())
except: # pylint: disable=bare-except
click.secho(
"Can not remove temporary directory `%s`. Please remove "
"`.pioenvs` directory from the project manually to avoid "
"build issues" % util.get_projectpioenvs_dir(force=True),
"it manually to avoid build issues" %
util.get_projectbuild_dir(force=True),
fg="yellow")
config = util.load_project_config()
check_project_defopts(config)
assert check_project_envs(config, environment)
env_default = None
if config.has_option("platformio", "env_default"):
env_default = util.parse_conf_multi_values(
config.get("platformio", "env_default"))
check_project_defopts(config)
check_project_envs(config, environment or env_default)
results = []
start_time = time()
for section in config.sections():
@@ -110,9 +108,11 @@ def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
results.append(result)
if result[1] and "monitor" in ep.get_build_targets() and \
"nobuild" not in ep.get_build_targets():
ctx.invoke(cmd_device_monitor)
ctx.invoke(
cmd_device_monitor,
environment=environment[0] if environment else None)
found_error = any([status is False for (_, status) in results])
found_error = any(status is False for (_, status) in results)
if (found_error or not silent) and len(results) > 1:
click.echo()
@@ -125,32 +125,47 @@ def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
class EnvironmentProcessor(object):
KNOWN_OPTIONS = ("platform", "framework", "board", "board_mcu",
"board_f_cpu", "board_f_flash", "board_flash_mode",
"build_flags", "src_build_flags", "build_unflags",
"src_filter", "extra_scripts", "targets", "upload_port",
"upload_protocol", "upload_speed", "upload_flags",
"upload_resetmethod", "lib_deps", "lib_ignore",
"lib_extra_dirs", "lib_ldf_mode", "lib_compat_mode",
"lib_archive", "piotest", "test_transport", "test_filter",
"test_ignore", "test_port", "debug_tool", "debug_port",
"debug_init_cmds", "debug_extra_cmds", "debug_server",
"debug_init_break", "debug_load_cmd", "monitor_port",
"monitor_baud", "monitor_rts", "monitor_dtr")
DEFAULT_DUMP_OPTIONS = ("platform", "framework", "board")
IGNORE_BUILD_OPTIONS = ("test_transport", "test_filter", "test_ignore",
"test_port", "debug_tool", "debug_port",
"debug_init_cmds", "debug_extra_cmds",
"debug_server", "debug_init_break",
"debug_load_cmd", "monitor_port", "monitor_baud",
"monitor_rts", "monitor_dtr")
KNOWN_PLATFORMIO_OPTIONS = [
"description", "env_default", "home_dir", "lib_dir", "libdeps_dir",
"include_dir", "src_dir", "build_dir", "data_dir", "test_dir",
"boards_dir", "lib_extra_dirs"
]
KNOWN_ENV_OPTIONS = [
"platform", "framework", "board", "build_flags", "src_build_flags",
"build_unflags", "src_filter", "extra_scripts", "targets",
"upload_port", "upload_protocol", "upload_speed", "upload_flags",
"upload_resetmethod", "lib_deps", "lib_ignore", "lib_extra_dirs",
"lib_ldf_mode", "lib_compat_mode", "lib_archive", "piotest",
"test_transport", "test_filter", "test_ignore", "test_port",
"test_speed", "test_build_project_src", "debug_tool", "debug_port",
"debug_init_cmds", "debug_extra_cmds", "debug_server",
"debug_init_break", "debug_load_cmd", "debug_load_mode",
"debug_svd_path", "monitor_port", "monitor_speed", "monitor_rts",
"monitor_dtr"
]
IGNORE_BUILD_OPTIONS = [
"test_transport", "test_filter", "test_ignore", "test_port",
"test_speed", "debug_port", "debug_init_cmds", "debug_extra_cmds",
"debug_server", "debug_init_break", "debug_load_cmd",
"debug_load_mode", "monitor_port", "monitor_speed", "monitor_rts",
"monitor_dtr"
]
REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"}
RENAMED_OPTIONS = {
"lib_use": "lib_deps",
"lib_force": "lib_deps",
"extra_script": "extra_scripts"
"extra_script": "extra_scripts",
"monitor_baud": "monitor_speed",
"board_mcu": "board_build.mcu",
"board_f_cpu": "board_build.f_cpu",
"board_f_flash": "board_build.f_flash",
"board_flash_mode": "board_build.flash_mode"
}
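`monitor_baud` and the old `board_*` shortcuts are now handled as renamed options, so legacy `platformio.ini` entries are remapped to the new `monitor_speed` and dotted `board_build.*` keys. An illustrative mapping (values are made up):
```
; legacy spelling                ; current spelling
monitor_baud = 115200            ; monitor_speed = 115200
board_f_cpu = 16000000L          ; board_build.f_cpu = 16000000L
board_flash_mode = qio           ; board_build.flash_mode = qio
```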
RENAMED_PLATFORMS = {"espressif": "espressif8266"}
@@ -175,19 +190,17 @@ class EnvironmentProcessor(object):
def process(self):
terminal_width, _ = click.get_terminal_size()
start_time = time()
env_dump = []
for k, v in self.options.items():
self.options[k] = self.options[k].strip()
if self.verbose or k in self.DEFAULT_DUMP_OPTIONS:
env_dump.append(
"%s: %s" % (k, ", ".join(util.parse_conf_multi_values(v))))
if not self.silent:
click.echo("[%s] Processing %s (%s)" %
(datetime.now().strftime("%c"),
click.style(self.name, fg="cyan", bold=True),
"; ".join([
"%s: %s" %
(k, ", ".join(util.parse_conf_multi_values(v)))
for k, v in self.options.items()
])))
click.echo("Processing %s (%s)" % (click.style(
self.name, fg="cyan", bold=True), "; ".join(env_dump)))
click.secho("-" * terminal_width, bold=True)
self.options = self._validate_options(self.options)
@@ -199,10 +212,10 @@ class EnvironmentProcessor(object):
if is_error or "piotest_processor" not in self.cmd_ctx.meta:
print_header(
"[%s] Took %.2f seconds" %
((click.style("ERROR", fg="red", bold=True)
if is_error else click.style(
"SUCCESS", fg="green", bold=True)), time() - start_time),
"[%s] Took %.2f seconds" % (
(click.style("ERROR", fg="red", bold=True) if is_error else
click.style("SUCCESS", fg="green", bold=True)),
time() - start_time),
is_error=is_error)
return not is_error
@@ -228,7 +241,11 @@ class EnvironmentProcessor(object):
v = self.RENAMED_PLATFORMS[v]
# warn about unknown options
if k not in self.KNOWN_OPTIONS and not k.startswith("custom_"):
unknown_conditions = [
k not in self.KNOWN_ENV_OPTIONS, not k.startswith("custom_"),
not k.startswith("board_")
]
if all(unknown_conditions):
click.secho(
"Detected non-PlatformIO `%s` option in `[env:%s]` section"
% (k, self.name),
@@ -279,10 +296,10 @@ class EnvironmentProcessor(object):
if d.strip()
], self.verbose)
if "lib_deps" in self.options:
_autoinstall_libdeps(self.cmd_ctx,
util.parse_conf_multi_values(
self.options['lib_deps']),
self.verbose)
_autoinstall_libdeps(
self.cmd_ctx,
util.parse_conf_multi_values(self.options['lib_deps']),
self.verbose)
try:
p = PlatformFactory.newPlatform(self.options['platform'])
@@ -307,36 +324,31 @@ def _autoinstall_libdeps(ctx, libraries, verbose=False):
try:
ctx.invoke(cmd_lib_install, libraries=[lib], silent=not verbose)
except exception.LibNotFound as e:
if not _is_builtin_lib(lib):
if verbose or not is_builtin_lib(lib):
click.secho("Warning! %s" % e, fg="yellow")
except exception.InternetIsOffline as e:
click.secho(str(e), fg="yellow")
def _is_builtin_lib(lib_name):
for storage in get_builtin_libs():
if any([l.get("name") == lib_name for l in storage['items']]):
return True
return False
def _clean_pioenvs_dir(pioenvs_dir):
structhash_file = join(pioenvs_dir, "structure.hash")
def _clean_build_dir(build_dir):
structhash_file = join(build_dir, "structure.hash")
proj_hash = calculate_project_hash()
# if project's config is modified
if (isdir(pioenvs_dir)
if (isdir(build_dir)
and getmtime(join(util.get_project_dir(),
"platformio.ini")) > getmtime(pioenvs_dir)):
util.rmtree_(pioenvs_dir)
"platformio.ini")) > getmtime(build_dir)):
util.rmtree_(build_dir)
# check project structure
if isdir(pioenvs_dir) and isfile(structhash_file):
if isdir(build_dir) and isfile(structhash_file):
with open(structhash_file) as f:
if f.read() == proj_hash:
return
util.rmtree_(pioenvs_dir)
util.rmtree_(build_dir)
if not isdir(pioenvs_dir):
makedirs(pioenvs_dir)
if not isdir(build_dir):
makedirs(build_dir)
with open(structhash_file, "w") as f:
f.write(proj_hash)
@@ -373,48 +385,51 @@ def print_summary(results, start_time):
err=status is False)
print_header(
"[%s] Took %.2f seconds" %
((click.style("SUCCESS", fg="green", bold=True)
if successed else click.style("ERROR", fg="red", bold=True)),
time() - start_time),
"[%s] Took %.2f seconds" % (
(click.style("SUCCESS", fg="green", bold=True) if successed else
click.style("ERROR", fg="red", bold=True)), time() - start_time),
is_error=not successed)
def check_project_defopts(config):
if not config.has_section("platformio"):
return True
known = ("env_default", "home_dir", "lib_dir", "libdeps_dir", "src_dir",
"envs_dir", "data_dir", "test_dir", "boards_dir",
"lib_extra_dirs")
unknown = set([k for k, _ in config.items("platformio")]) - set(known)
unknown = set([k for k, _ in config.items("platformio")]) - set(
EnvironmentProcessor.KNOWN_PLATFORMIO_OPTIONS)
if not unknown:
return True
click.secho(
"Warning! Ignore unknown `%s` option from `[platformio]` section" %
"Warning! Ignore unknown `%s` option in `[platformio]` section" %
", ".join(unknown),
fg="yellow")
return False
def check_project_envs(config, environments):
def check_project_envs(config, environments=None):
if not config.sections():
raise exception.ProjectEnvsNotAvailable()
known = set([s[4:] for s in config.sections() if s.startswith("env:")])
unknown = set(environments) - known
unknown = set(environments or []) - known
if unknown:
raise exception.UnknownEnvNames(", ".join(unknown), ", ".join(known))
return True
def calculate_project_hash():
structure = [__version__]
check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
chunks = [__version__]
for d in (util.get_projectsrc_dir(), util.get_projectlib_dir()):
if not isdir(d):
continue
for root, _, files in walk(d):
for f in files:
path = join(root, f)
if not any([s in path for s in (".git", ".svn", ".pioenvs")]):
structure.append(path)
return sha1(",".join(sorted(structure))).hexdigest() if structure else ""
if path.endswith(check_suffixes):
chunks.append(path)
chunks_to_str = ",".join(sorted(chunks))
if "windows" in util.get_systype():
# Fix issue with useless project rebuilding for case insensitive FS.
# A case of disk drive can differ...
chunks_to_str = chunks_to_str.lower()
return sha1(chunks_to_str).hexdigest()
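The project hash becomes a SHA-1 over the PlatformIO version plus the sorted paths of source-like files only (`.c/.cc/.cpp/.h/.hpp/.s/.S`), lower-cased on Windows so a differing drive-letter case does not trigger needless rebuilds. A compact sketch (paths are hypothetical):
```
from hashlib import sha1

CHECK_SUFFIXES = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")

def project_hash(version, paths, windows=False):
    chunks = [version]
    chunks.extend(p for p in paths if p.endswith(CHECK_SUFFIXES))
    data = ",".join(sorted(chunks))
    if windows:
        # case-insensitive FS: the drive-letter case may differ between runs
        data = data.lower()
    return sha1(data.encode()).hexdigest()

print(project_hash("3.6.5", ["src/main.cpp", "include/app.h", "README"]))
```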


@@ -32,8 +32,8 @@ def settings_get(name):
click.echo(
list_tpl.format(
name=click.style("Name", fg="cyan"),
value=(click.style("Value", fg="green") +
click.style(" [Default]", fg="yellow")),
value=(click.style("Value", fg="green") + click.style(
" [Default]", fg="yellow")),
description="Description"))
click.echo("-" * terminal_width)


@@ -32,14 +32,14 @@ from platformio.managers.lib import LibraryManager
help="Do not update, only check for new version")
@click.pass_context
def cli(ctx, core_packages, only_check):
# cleanup lib search results, cached board and platform lists
app.clean_cache()
update_core_packages(only_check)
if core_packages:
return
# cleanup lib search results, cached board and platform lists
app.clean_cache()
click.echo()
click.echo("Platform Manager")
click.echo("================")


@@ -12,13 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
from zipfile import ZipFile
import click
import requests
from platformio import VERSION, __version__, exception, util
from platformio.commands.home import shutdown_servers
from platformio.managers.core import shutdown_piohome_servers
@click.command(
@@ -34,14 +36,11 @@ def cli(dev):
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
# kill all PIO Home servers, they block `pioplus` binary
shutdown_servers()
shutdown_piohome_servers()
to_develop = dev or not all([c.isdigit() for c in __version__ if c != "."])
cmds = ([
"pip", "install", "--upgrade",
"https://github.com/platformio/platformio-core/archive/develop.zip"
if to_develop else "platformio"
], ["platformio", "--version"])
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
cmds = (["pip", "install", "--upgrade",
get_pip_package(to_develop)], ["platformio", "--version"])
cmd = None
r = None
@@ -64,12 +63,12 @@ def cli(dev):
fg="green")
click.echo("Release notes: ", nl=False)
click.secho(
"http://docs.platformio.org/en/latest/history.html", fg="cyan")
"https://docs.platformio.org/en/latest/history.html", fg="cyan")
except Exception as e: # pylint: disable=broad-except
if not r:
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
permission_errors = ("permission denied", "not permitted")
if (any([m in r['err'].lower() for m in permission_errors])
if (any(m in r['err'].lower() for m in permission_errors)
and "windows" not in util.get_systype()):
click.secho(
"""
@@ -92,6 +91,30 @@ WARNING! Don't use `sudo` for the rest PlatformIO commands.
return True
def get_pip_package(to_develop):
if not to_develop:
return "platformio"
dl_url = ("https://github.com/platformio/"
"platformio-core/archive/develop.zip")
cache_dir = util.get_cache_dir()
if not os.path.isdir(cache_dir):
os.makedirs(cache_dir)
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
try:
with open(pkg_name, "w") as fp:
r = util.exec_command(["curl", "-fsSL", dl_url],
stdout=fp,
universal_newlines=True)
assert r['returncode'] == 0
# check ZIP structure
with ZipFile(pkg_name) as zp:
assert zp.testzip() is None
return pkg_name
except: # pylint: disable=bare-except
pass
return dl_url
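For develop upgrades the ZIP is pre-downloaded with `curl` into the cache directory and validated with `ZipFile.testzip()` before being handed to `pip`; on any failure the code falls back to installing directly from the URL. A minimal sketch of the validation step:
```
from zipfile import ZipFile, BadZipfile

def is_valid_zip(path):
    try:
        with ZipFile(path) as zp:
            return zp.testzip() is None  # None means no corrupt members found
    except (IOError, OSError, BadZipfile):
        return False
```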
def get_latest_version():
try:
if not str(VERSION[2]).isdigit():
@@ -128,7 +151,7 @@ def get_develop_latest_version():
def get_pypi_latest_version():
r = requests.get(
"https://pypi.python.org/pypi/platformio/json",
"https://pypi.org/pypi/platformio/json",
headers=util.get_request_defheaders())
r.raise_for_status()
return r.json()['info']['version']


@@ -21,7 +21,7 @@ from time import mktime
import click
import requests
from platformio import app, util
from platformio import util
from platformio.exception import (FDSHASumMismatch, FDSizeMismatch,
FDUnrecognizedStatusCode)
@@ -43,14 +43,12 @@ class FileDownloader(object):
disposition = self._request.headers.get("content-disposition")
if disposition and "filename=" in disposition:
self._fname = disposition[
disposition.index("filename=") + 9:].replace('"', "").replace(
"'", "")
self._fname = disposition[disposition.index("filename=") +
9:].replace('"', "").replace("'", "")
self._fname = self._fname.encode("utf8")
else:
self._fname = url.split("/")[-1]
self._fname = [p for p in url.split("/") if p][-1]
self._progressbar = None
self._destination = self._fname
if dest_dir:
self.set_destination(
@@ -70,12 +68,12 @@ class FileDownloader(object):
return -1
return int(self._request.headers['content-length'])
def start(self):
def start(self, with_progress=True):
label = "Downloading"
itercontent = self._request.iter_content(chunk_size=self.CHUNK_SIZE)
f = open(self._destination, "wb")
try:
if app.is_disabled_progressbar() or self.get_size() == -1:
if not with_progress or self.get_size() == -1:
click.echo("%s..." % label)
for chunk in itercontent:
if chunk:
@@ -85,12 +83,6 @@ class FileDownloader(object):
with click.progressbar(length=chunks, label=label) as pb:
for _ in pb:
f.write(next(itercontent))
except IOError as e:
click.secho(
"Error: Please read http://bit.ly/package-manager-ioerror",
fg="red",
err=True)
raise e
finally:
f.close()
self._request.close()
@@ -98,6 +90,8 @@ class FileDownloader(object):
if self.get_lmtime():
self._preserve_filemtime(self.get_lmtime())
return True
def verify(self, sha1=None):
_dlsize = getsize(self._destination)
if self.get_size() != -1 and _dlsize != self.get_size():


@@ -28,6 +28,10 @@ class ReturnErrorCode(PlatformioException):
MESSAGE = "{0}"
class LockFileTimeoutError(PlatformioException):
pass
class MinitermException(PlatformioException):
pass
@@ -97,10 +101,16 @@ class UndefinedPackageVersion(PlatformioException):
class PackageInstallError(PlatformioException):
MESSAGE = ("Could not install '{0}' with version requirements '{1}' "
"for your system '{2}'.\n\n"
"Please try this solution -> http://bit.ly/faq-package-manager")
class ExtractArchiveItemError(PlatformioException):
MESSAGE = (
"Could not install '{0}' with version requirements '{1}' for your "
"system '{2}'.\n If you use Antivirus, it can block PlatformIO "
"Package Manager. Try to disable it for a while.")
"Could not extract `{0}` to `{1}`. Try to disable antivirus "
"tool or check this solution -> http://bit.ly/faq-package-manager")
class FDUnrecognizedStatusCode(PlatformioException):
@@ -168,7 +178,7 @@ class InternetIsOffline(PlatformioException):
MESSAGE = (
"You are not connected to the Internet.\n"
"If you build a project first time, we need Internet connection "
"to install all dependencies and toolchain.")
"to install all dependencies and toolchains.")
class LibNotFound(PlatformioException):
@@ -193,6 +203,11 @@ class InvalidLibConfURL(PlatformioException):
MESSAGE = "Invalid library config URL '{0}'"
class InvalidProjectConf(PlatformioException):
MESSAGE = "Invalid `platformio.ini`, project configuration file: '{0}'"
class BuildScriptNotFound(PlatformioException):
MESSAGE = "Invalid path '{0}' to build script"
@@ -208,6 +223,11 @@ class InvalidSettingValue(PlatformioException):
MESSAGE = "Invalid value '{0}' for the setting '{1}'"
class InvalidJSONFile(PlatformioException):
MESSAGE = "Could not load broken JSON: {0}"
class CIBuildEnvsEmpty(PlatformioException):
MESSAGE = ("Can't find PlatformIO build environments.\n"
@@ -215,13 +235,32 @@ class CIBuildEnvsEmpty(PlatformioException):
"predefined environments using `--project-conf` option")
class InvalidUdevRules(PlatformioException):
pass
class MissedUdevRules(InvalidUdevRules):
MESSAGE = (
"Warning! Please install `99-platformio-udev.rules`. \nMode details: "
"https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules")
class OutdatedUdevRules(InvalidUdevRules):
MESSAGE = (
"Warning! Your `{0}` are outdated. Please update or reinstall them."
"\n Mode details: https://docs.platformio.org"
"/en/latest/faq.html#platformio-udev-rules")
class UpgradeError(PlatformioException):
MESSAGE = """{0}
* Upgrade using `pip install -U platformio`
* Try different installation/upgrading steps:
http://docs.platformio.org/page/installation.html
https://docs.platformio.org/page/installation.html
"""
@@ -239,3 +278,15 @@ class CygwinEnvDetected(PlatformioException):
MESSAGE = ("PlatformIO does not work within Cygwin environment. "
"Use native Terminal instead.")
class DebugSupportError(PlatformioException):
MESSAGE = ("Currently, PlatformIO does not support debugging for `{0}`.\n"
"Please contact support@pioplus.com or visit "
"< https://docs.platformio.org/page/plus/debugging.html >")
class DebugInvalidOptions(PlatformioException):
pass

View File

@@ -15,6 +15,7 @@
import json
import os
import re
import sys
from os.path import abspath, basename, expanduser, isdir, isfile, join, relpath
import bottle
@@ -30,11 +31,8 @@ class ProjectGenerator(object):
self.project_dir = project_dir
self.ide = ide
self.env_name = env_name
self._tplvars = {}
with util.cd(self.project_dir):
self.project_src_dir = util.get_projectsrc_dir()
self._gather_tplvars()
@staticmethod
@@ -43,7 +41,7 @@ class ProjectGenerator(object):
return sorted(
[d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))])
@util.memoized
@util.memoized()
def get_project_env(self):
data = {}
config = util.load_project_config(self.project_dir)
@@ -57,7 +55,6 @@ class ProjectGenerator(object):
data[k] = v
return data
@util.memoized
def get_project_build_data(self):
data = {
"defines": [],
@@ -92,7 +89,7 @@ class ProjectGenerator(object):
def get_src_files(self):
result = []
with util.cd(self.project_dir):
for root, _, files in os.walk(self.project_src_dir):
for root, _, files in os.walk(util.get_projectsrc_dir()):
for f in files:
result.append(relpath(join(root, f)))
return result
@@ -131,48 +128,30 @@ class ProjectGenerator(object):
@staticmethod
def _merge_contents(dst_path, contents):
file_name = basename(dst_path)
# merge .gitignore
if file_name == ".gitignore" and isfile(dst_path):
modified = False
default = [l.strip() for l in contents.split("\n")]
with open(dst_path) as fp:
current = [l.strip() for l in fp.readlines()]
for d in default:
if d and d not in current:
modified = True
current.append(d)
if not modified:
return
contents = "\n".join(current) + "\n"
if basename(dst_path) == ".gitignore" and isfile(dst_path):
return
with open(dst_path, "w") as f:
f.write(contents)
def _gather_tplvars(self):
self._tplvars.update(self.get_project_env())
self._tplvars.update(self.get_project_build_data())
self._tplvars.update({
"project_name":
self.get_project_name(),
"src_files":
self.get_src_files(),
"user_home_dir":
abspath(expanduser("~")),
"project_dir":
self.project_dir,
"project_src_dir":
self.project_src_dir,
"systype":
util.get_systype(),
"platformio_path":
self._fix_os_path(util.where_is_program("platformio")),
"env_pathsep":
os.pathsep,
"env_path":
self._fix_os_path(os.getenv("PATH"))
})
with util.cd(self.project_dir):
self._tplvars.update({
"project_name": self.get_project_name(),
"src_files": self.get_src_files(),
"user_home_dir": abspath(expanduser("~")),
"project_dir": self.project_dir,
"project_src_dir": util.get_projectsrc_dir(),
"project_lib_dir": util.get_projectlib_dir(),
"project_libdeps_dir": util.get_projectlibdeps_dir(),
"systype": util.get_systype(),
"platformio_path": self._fix_os_path(
sys.argv[0] if isfile(sys.argv[0])
else util.where_is_program("platformio")),
"env_pathsep": os.pathsep,
"env_path": self._fix_os_path(os.getenv("PATH"))
}) # yapf: disable
@staticmethod
def _fix_os_path(path):

View File

@@ -3,4 +3,4 @@
% end
% for define in defines:
-D{{!define}}
% end
% end

View File

@@ -1,7 +1,8 @@
% _defines = " ".join(["-D%s" % d for d in defines])
{
"execPath": "{{ cxx_path.replace("\\", "/") }}",
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
"gccErrorLimit": 15,
"gccIncludePaths": "{{ ','.join(includes).replace("\\", "/") }}",
"gccSuppressWarnings": false

View File

@@ -1,3 +1,4 @@
.pio
.pioenvs
.piolibdeps
.clang_complete

View File

@@ -1,3 +1,4 @@
.pio
.pioenvs
.piolibdeps
CMakeListsPrivate.txt

View File

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CMakeWorkspace" PROJECT_DIR="$PROJECT_DIR$" />
<component name="CidrRootsConfiguration">
<sourceRoots>
<file path="$PROJECT_DIR$/src" />
</sourceRoots>
<libraryRoots>
<file path="$PROJECT_DIR$/lib" />
<file path="$PROJECT_DIR$/.piolibdeps" />
</libraryRoots>
<excludeRoots>
<file path="$PROJECT_DIR$/.pio" />
</excludeRoots>
<excludeRoots>
<file path="$PROJECT_DIR$/.pioenvs" />
</excludeRoots>
</component>
</project>

View File

@@ -9,6 +9,12 @@ add_custom_target(
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)
add_custom_target(
PLATFORMIO_BUILD_VERBOSE ALL
COMMAND ${PLATFORMIO_CMD} -f -c clion run --verbose
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)
add_custom_target(
PLATFORMIO_UPLOAD ALL
COMMAND ${PLATFORMIO_CMD} -f -c clion run --target upload
@@ -21,6 +27,12 @@ add_custom_target(
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)
add_custom_target(
PLATFORMIO_MONITOR ALL
COMMAND ${PLATFORMIO_CMD} -f -c clion device monitor
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)
add_custom_target(
PLATFORMIO_TEST ALL
COMMAND ${PLATFORMIO_CMD} -f -c clion test

View File

@@ -1,26 +1,35 @@
set(ENV{PATH} "{{env_path}}")
set(PLATFORMIO_CMD "{{platformio_path}}")
# !!! WARNING !!!
# PLEASE DO NOT MODIFY THIS FILE!
# USE https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
SET(CMAKE_C_COMPILER "{{cc_path.replace("\\", "/")}}")
SET(CMAKE_CXX_COMPILER "{{cxx_path.replace("\\", "/")}}")
% def _normalize_path(path):
% if project_dir in path:
% path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
% elif user_home_dir in path:
% if "windows" in systype:
% path = path.replace(user_home_dir, "$ENV{HOMEDRIVE}$ENV{HOMEPATH}")
% else:
% path = path.replace(user_home_dir, "$ENV{HOME}")
% end
% end
% return path.replace("\\", "/")
% end
set(PLATFORMIO_CMD "{{ _normalize_path(platformio_path) }}")
SET(CMAKE_C_COMPILER "{{ _normalize_path(cc_path) }}")
SET(CMAKE_CXX_COMPILER "{{ _normalize_path(cxx_path) }}")
SET(CMAKE_CXX_FLAGS_DISTRIBUTION "{{cxx_flags}}")
SET(CMAKE_C_FLAGS_DISTRIBUTION "{{cc_flags}}")
set(CMAKE_CXX_STANDARD 11)
% import re
% for define in defines:
add_definitions(-D{{!define}})
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
% end
% for include in includes:
% if include.startswith(user_home_dir):
% if "windows" in systype:
include_directories("$ENV{HOMEDRIVE}$ENV{HOMEPATH}{{include.replace(user_home_dir, '').replace("\\", "/")}}")
% else:
include_directories("$ENV{HOME}{{include.replace(user_home_dir, '').replace("\\", "/")}}")
% end
% else:
include_directories("{{include.replace("\\", "/")}}")
% end
include_directories("{{ _normalize_path(include) }}")
% end
FILE(GLOB_RECURSE SRC_LIST "{{project_src_dir.replace("\\", "/")}}/*.*")
FILE(GLOB_RECURSE SRC_LIST "{{ _normalize_path(project_src_dir) }}/*.*" "{{ _normalize_path(project_lib_dir) }}/*.*" "{{ _normalize_path(project_libdeps_dir) }}/*.*")
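The _normalize_path helper above rewrites absolute paths into CMake/environment variables so the generated file survives a project move. A plain-Python illustration of the same substitution, with made-up sample paths:

def normalize_path(path, project_dir, user_home_dir, systype):
    # Prefer the project-relative form, then fall back to the user's home dir
    if project_dir in path:
        path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
    elif user_home_dir in path:
        if "windows" in systype:
            path = path.replace(user_home_dir, "$ENV{HOMEDRIVE}$ENV{HOMEPATH}")
        else:
            path = path.replace(user_home_dir, "$ENV{HOME}")
    return path.replace("\\", "/")

# normalize_path("/home/user/project/src", "/home/user/project",
#                "/home/user", "linux_x86_64")
#   -> "${CMAKE_CURRENT_LIST_DIR}/src"
# normalize_path("/home/user/.platformio/packages/toolchain/bin/gcc",
#                "/home/user/project", "/home/user", "linux_x86_64")
#   -> "$ENV{HOME}/.platformio/packages/toolchain/bin/gcc"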

View File

@@ -5,13 +5,13 @@
<storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.910961921" moduleId="org.eclipse.cdt.core.settings" name="Default">
<externalSettings/>
<extensions>
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
<extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
</extensions>
</storageModule>
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
@@ -99,6 +99,104 @@
</storageModule>
<storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
</cconfiguration>
<cconfiguration id="0.910961921.1363900502">
<storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.910961921.1363900502" moduleId="org.eclipse.cdt.core.settings" name="Debug">
<externalSettings/>
<extensions>
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
<extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
</extensions>
</storageModule>
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
<configuration artifactName="mbed" buildProperties="" description="" id="0.910961921.1363900502" name="Debug" parent="org.eclipse.cdt.build.core.prefbase.cfg">
<folderInfo id="0.910961921.1363900502." name="/" resourcePath="">
<toolChain id="org.eclipse.cdt.build.core.prefbase.toolchain.2116690625" name="No ToolChain" resourceTypeBasedDiscovery="false" superClass="org.eclipse.cdt.build.core.prefbase.toolchain">
<targetPlatform binaryParser="org.eclipse.cdt.core.ELF" id="org.eclipse.cdt.build.core.prefbase.toolchain.2116690625.848954921" name=""/>
<builder arguments="-f -c eclipse debug" cleanBuildTarget="run --target clean" command="platformio" enableCleanBuild="false" id="org.eclipse.cdt.build.core.settings.default.builder.985867833" incrementalBuildTarget="" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
<tool id="org.eclipse.cdt.build.core.settings.holder.libs.1855678035" name="holder for library settings" superClass="org.eclipse.cdt.build.core.settings.holder.libs"/>
<tool id="org.eclipse.cdt.build.core.settings.holder.30528994" name="Assembly" superClass="org.eclipse.cdt.build.core.settings.holder">
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.794801023" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" valueType="includePath">
% for include in includes:
% if "toolchain" in include:
% continue
% end
% if include.startswith(user_home_dir):
% if "windows" in systype:
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
% else:
<listOptionValue builtIn="false" value="${HOME}{{include.replace(user_home_dir, '')}}"/>
% end
% else:
<listOptionValue builtIn="false" value="{{include}}"/>
% end
% end
</option>
<option id="org.eclipse.cdt.build.core.settings.holder.symbols.1743427839" name="Symbols" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
% for define in defines:
<listOptionValue builtIn="false" value="{{define}}"/>
% end
</option>
<inputType id="org.eclipse.cdt.build.core.settings.holder.inType.919136836" languageId="org.eclipse.cdt.core.assembly" languageName="Assembly" sourceContentType="org.eclipse.cdt.core.asmSource" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
</tool>
<tool id="org.eclipse.cdt.build.core.settings.holder.1146422798" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder">
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.650084869" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" useByScannerDiscovery="false" valueType="includePath">
% for include in includes:
% if "toolchain" in include:
% continue
% end
% if include.startswith(user_home_dir):
% if "windows" in systype:
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
% else:
<listOptionValue builtIn="false" value="${HOME}{{include.replace(user_home_dir, '')}}"/>
% end
% else:
<listOptionValue builtIn="false" value="{{include}}"/>
% end
% end
</option>
<option id="org.eclipse.cdt.build.core.settings.holder.symbols.2055633423" name="Symbols" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" useByScannerDiscovery="false" valueType="definedSymbols">
% for define in defines:
<listOptionValue builtIn="false" value="{{define}}"/>
% end
</option>
<inputType id="org.eclipse.cdt.build.core.settings.holder.inType.445650141" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
</tool>
<tool id="org.eclipse.cdt.build.core.settings.holder.1637357529" name="GNU C" superClass="org.eclipse.cdt.build.core.settings.holder">
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.1246337321" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" useByScannerDiscovery="false" valueType="includePath">
% for include in includes:
% if "toolchain" in include:
% continue
% end
% if include.startswith(user_home_dir):
% if "windows" in systype:
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
% else:
<listOptionValue builtIn="false" value="${HOME}{{include.replace(user_home_dir, '')}}"/>
% end
% else:
<listOptionValue builtIn="false" value="{{include}}"/>
% end
% end
</option>
<option id="org.eclipse.cdt.build.core.settings.holder.symbols.2122043341" name="Symbols" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" useByScannerDiscovery="false" valueType="definedSymbols">
% for define in defines:
<listOptionValue builtIn="false" value="{{define}}"/>
% end
</option>
<inputType id="org.eclipse.cdt.build.core.settings.holder.inType.207004812" languageId="org.eclipse.cdt.core.gcc" languageName="GNU C" sourceContentType="org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
</tool>
</toolChain>
</folderInfo>
</configuration>
</storageModule>
<storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
</cconfiguration>
</storageModule>
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
<project id="{{project_name}}.null.189551033" name="{{project_name}}"/>

View File

@@ -2,7 +2,7 @@
<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="platformio -c eclipse debug -d ${project_loc} --interface=gdb"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="piodebuggdb"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=".pioinit"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
@@ -12,17 +12,17 @@
<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="0"/>
<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="1"/>
<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_REGISTER_GROUPS" value=""/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="true"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value=".pioenvs/{{env_name}}/firmware.elf"/>
<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value=""/>
<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="{{prog_path}}"/>
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="{{project_name}}"/>
<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="false"/>
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value="0.910961921.1363900502"/>
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
<listEntry value="/{{project_name}}"/>
</listAttribute>

View File

@@ -1,3 +1,8 @@
% import re
% STD_RE = re.compile(r"(\-std=[a-z\+]+\d+)")
% cxx_stds = STD_RE.findall(cxx_flags)
% cxx_std = cxx_stds[-1] if cxx_stds else ""
%
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project>
<configuration id="0.910961921" name="Default">
@@ -6,9 +11,24 @@
<provider-reference id="org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider" ref="shared-provider"/>
<provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
% if "windows" in systype:
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD &quot;${INPUTS}&quot;" prefer-non-shared="true">
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} {{ cxx_std }} -E -P -v -dD &quot;${INPUTS}&quot;" prefer-non-shared="true">
% else:
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD &quot;${INPUTS}&quot;" prefer-non-shared="true">
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} {{ cxx_std }} -E -P -v -dD &quot;${INPUTS}&quot;" prefer-non-shared="true">
% end
<language-scope id="org.eclipse.cdt.core.gcc"/>
<language-scope id="org.eclipse.cdt.core.g++"/>
</provider>
</extension>
</configuration>
<configuration id="0.910961921.1363900502" name="Debug">
<extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
<provider copy-of="extension" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider"/>
<provider-reference id="org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider" ref="shared-provider"/>
<provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
% if "windows" in systype:
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} {{ cxx_std }} -E -P -v -dD &quot;${INPUTS}&quot;" prefer-non-shared="true">
% else:
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} {{ cxx_std }} -E -P -v -dD &quot;${INPUTS}&quot;" prefer-non-shared="true">
% end
<language-scope id="org.eclipse.cdt.core.gcc"/>
<language-scope id="org.eclipse.cdt.core.g++"/>

View File

@@ -3,4 +3,9 @@ environment/project/0.910961921/PATH/delimiter={{env_pathsep.replace(":", "\\:")
environment/project/0.910961921/PATH/operation=replace
environment/project/0.910961921/PATH/value={{env_path.replace(":", "\\:")}}
environment/project/0.910961921/append=true
environment/project/0.910961921/appendContributed=true
environment/project/0.910961921/appendContributed=true
environment/project/0.910961921.1363900502/PATH/delimiter={{env_pathsep.replace(":", "\\:")}}
environment/project/0.910961921.1363900502/PATH/operation=replace
environment/project/0.910961921.1363900502/PATH/value={{env_path.replace(":", "\\:")}}
environment/project/0.910961921.1363900502/append=true
environment/project/0.910961921.1363900502/appendContributed=true

View File

@@ -3,4 +3,4 @@
% end
% for define in defines:
-D{{!define}}
% end
% end

View File

@@ -1,3 +1,4 @@
.pio
.pioenvs
.piolibdeps
.clang_complete

View File

@@ -11,7 +11,7 @@
<itemPath>nbproject/private/launcher.properties</itemPath>
</logicalFolder>
</logicalFolder>
<sourceFolderFilter>^(nbproject|.pioenvs)$</sourceFolderFilter>
<sourceFolderFilter>^(nbproject|.pio|.pioenvs)$</sourceFolderFilter>
<sourceRootList>
<Elem>.</Elem>
</sourceRootList>

View File

@@ -14,7 +14,8 @@ INCLUDEPATH += "{{include}}"
% end
% for define in defines:
DEFINES += "{{define}}"
% tokens = define.split("##", 1)
DEFINES += "{{tokens[0].strip()}}"
% end
OTHER_FILES += platformio.ini

View File

@@ -4,7 +4,7 @@
{
"cmd":
[
"platformio",
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"run"
],
@@ -14,7 +14,7 @@
{
"cmd":
[
"platformio",
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"run"
],
@@ -23,27 +23,7 @@
{
"cmd":
[
"platformio",
"-f", "-c", "sublimetext",
"run",
"--target",
"clean"
],
"name": "Clean"
},
{
"cmd":
[
"platformio",
"-f", "-c", "sublimetext",
"test"
],
"name": "Test"
},
{
"cmd":
[
"platformio",
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"run",
"--target",
@@ -54,7 +34,27 @@
{
"cmd":
[
"platformio",
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"run",
"--target",
"clean"
],
"name": "Clean"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"test"
],
"name": "Test"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"run",
"--target",
@@ -65,7 +65,7 @@
{
"cmd":
[
"platformio",
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"run",
"--target",
@@ -76,16 +76,24 @@
{
"cmd":
[
"platformio",
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"update"
],
"name": "Update platforms and libraries"
},
{
"cmd":
[
"{{ platformio_path }}",
"-f", "-c", "sublimetext",
"upgrade"
],
"name": "Upgrade PlatformIO Core"
}
],
"working_dir": "${project_path:${folder}}",
"selector": "source.c, source.c++",
"path": "{{env_path}}"
"selector": "source.c, source.c++"
}
],
"folders":
@@ -97,8 +105,8 @@
"settings":
{
"sublimegdb_workingdir": "{{project_dir}}",
"sublimegdb_exec_cmd": "-exec-continue",
"sublimegdb_commandline": "{{platformio_path}} -f -c sublimetext debug --interface=gdb --interpreter=mi -x .pioinit"
"sublimegdb_exec_cmd": "",
"sublimegdb_commandline": "{{ platformio_path }} -f -c sublimetext debug --interface=gdb --interpreter=mi -x .pioinit"
}
}

View File

@@ -1,6 +1,6 @@
% for include in includes:
-I{{include}}
-I"{{include}}"
% end
% for define in defines:
-D{{!define}}
% end
% end

View File

@@ -1,8 +1,9 @@
% _defines = " ".join(["-D%s" % d for d in defines])
{
"execPath": "{{ cxx_path.replace("\\", "/") }}",
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
"gccErrorLimit": 15,
"gccIncludePaths": "{{ ','.join(includes).replace("\\", "/") }}",
"gccIncludePaths": "{{! ','.join("'{}'".format(w.replace("\\", '/')) for w in includes)}}",
"gccSuppressWarnings": false
}

View File

@@ -1,3 +1,4 @@
.pio
.pioenvs
.piolibdeps
.clang_complete

View File

@@ -15,7 +15,7 @@
</ItemGroup>
% for file in src_files:
<ItemGroup>
% if any([file.endswith(".%s" % e) for e in ("h", "hh", "hpp", "inc")]):
% if any(file.endswith(".%s" % e) for e in ("h", "hh", "hpp", "inc")):
<ClInclude Include="{{file}}">
<Filter>Header Files</Filter>
</ClInclude>

View File

@@ -60,7 +60,7 @@
</ItemGroup>
% for file in src_files:
<ItemGroup>
% if any([file.endswith(".%s" % e) for e in ("h", "hh", "hpp", "inc")]):
% if any(file.endswith(".%s" % e) for e in ("h", "hh", "hpp", "inc")):
<ClInclude Include="{{file}}">
<Filter>Header Files</Filter>
</ClInclude>

View File

@@ -1,3 +1,6 @@
.pio
.pioenvs
.piolibdeps
.vscode/.browse.c_cpp.db*
.vscode/c_cpp_properties.json
.vscode/launch.json

View File

@@ -1,38 +1,69 @@
{
"!!! WARNING !!!": "PLEASE DO NOT MODIFY THIS FILE! USE https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags",
"configurations": [
{
% import platform
% from os.path import commonprefix, dirname
%
% systype = platform.system().lower()
%
% def _escape(text):
% return text.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')
% end
%
% cleaned_includes = []
% for include in includes:
% if "toolchain-" not in dirname(commonprefix([include, cc_path])):
% cleaned_includes.append(include)
% end
% end
%
% if systype == "windows":
"name": "Win32",
% elif systype == "darwin":
"name": "Mac",
"macFrameworkPath": [],
% else:
"name": "Linux",
% end
"includePath": [
% for include in includes:
"{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
% for include in cleaned_includes:
"{{! _escape(include) }}",
% end
""
],
"browse": {
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": "",
"databaseFilename": "${workspaceRoot}/.vscode/.browse.c_cpp.db",
"path": [
% for include in includes:
"{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
% for include in cleaned_includes:
"{{! _escape(include) }}",
% end
""
]
},
"defines": [
% for define in defines:
"{{!define.replace('"', '\\"')}}",
"{{! _escape(define) }}",
% end
""
],
"intelliSenseMode": "clang-x64"
"intelliSenseMode": "clang-x64",
% import re
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
%
% # pass only architecture specific flags
% cc_m_flags = " ".join([f.strip() for f in cc_flags.split(" ") if f.strip().startswith("-m")])
%
% if cc_stds:
"cStandard": "c{{ cc_stds[-1] }}",
% end
% if cxx_stds:
"cppStandard": "c++{{ cxx_stds[-1] }}",
% end
"compilerPath": "{{! _escape(cc_path) }} {{! _escape(cc_m_flags) }}"
}
]
}
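To make the flag handling above easier to follow, here is a standalone sketch of the same parsing outside the template syntax (the compiler flags below are made up):

import re

cc_flags = "-mcpu=cortex-m3 -mthumb -Os -std=gnu11 -MMD"
cxx_flags = "-mcpu=cortex-m3 -mthumb -Os -std=gnu++11 -fno-rtti"

STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
cc_stds = STD_RE.findall(cc_flags)        # ['11']
cxx_stds = STD_RE.findall(cxx_flags)      # ['11']
c_standard = "c" + cc_stds[-1] if cc_stds else ""         # "c11"
cpp_standard = "c++" + cxx_stds[-1] if cxx_stds else ""   # "c++11"

# Only architecture-specific flags are appended to the compiler path:
cc_m_flags = " ".join(
    f.strip() for f in cc_flags.split(" ") if f.strip().startswith("-m"))
# -> "-mcpu=cortex-m3 -mthumb"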

View File

@@ -0,0 +1,7 @@
{
// See http://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"platformio.platformio-ide"
]
}

View File

@@ -1,15 +1,44 @@
// AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY
// PIO Unified Debugger
//
// Documentation: https://docs.platformio.org/page/plus/debugging.html
// Configuration: https://docs.platformio.org/page/projectconf/section_env_debug.html
% from os.path import dirname, join
%
% def _escape_path(path):
% return path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')
% end
%
{
"version": "0.2.0",
"configurations": [
{
"type": "gdb",
"type": "platformio-debug",
"request": "launch",
"cwd": "${workspaceRoot}",
"name": "PlatformIO Debugger",
"target": "{{prog_path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
"gdbpath": "{{join(dirname(platformio_path), "piodebuggdb").replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
"autorun": [ "source .pioinit" ]
"name": "PIO Debug",
"executable": "{{ _escape_path(prog_path) }}",
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
% if svd_path:
"svdPath": "{{ _escape_path(svd_path) }}",
% end
"preLaunchTask": {
"type": "PlatformIO",
"task": "Pre-Debug"
},
"internalConsoleOptions": "openOnSessionStart"
},
{
"type": "platformio-debug",
"request": "launch",
"name": "PIO Debug (skip Pre-Debug)",
"executable": "{{ _escape_path(prog_path) }}",
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
% if svd_path:
"svdPath": "{{ _escape_path(svd_path) }}",
% end
"internalConsoleOptions": "openOnSessionStart"
}
]
}

platformio/lockfile.py Normal file
View File

@@ -0,0 +1,108 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import remove
from os.path import abspath, exists, getmtime
from time import sleep, time
from platformio import exception
LOCKFILE_TIMEOUT = 3600 # in seconds, 1 hour
LOCKFILE_DELAY = 0.2
LOCKFILE_INTERFACE_FCNTL = 1
LOCKFILE_INTERFACE_MSVCRT = 2
try:
import fcntl
LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_FCNTL
except ImportError:
try:
import msvcrt
LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_MSVCRT
except ImportError:
LOCKFILE_CURRENT_INTERFACE = None
class LockFileExists(Exception):
pass
class LockFile(object):
def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
self.timeout = timeout
self.delay = delay
self._lock_path = abspath(path) + ".lock"
self._fp = None
def _lock(self):
if not LOCKFILE_CURRENT_INTERFACE and exists(self._lock_path):
# remove stale lock
if time() - getmtime(self._lock_path) > 10:
try:
remove(self._lock_path)
except: # pylint: disable=bare-except
pass
else:
raise LockFileExists
self._fp = open(self._lock_path, "w")
try:
if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
msvcrt.locking(self._fp.fileno(), msvcrt.LK_NBLCK, 1)
except IOError:
self._fp = None
raise LockFileExists
return True
def _unlock(self):
if not self._fp:
return
if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
fcntl.flock(self._fp.fileno(), fcntl.LOCK_UN)
elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
msvcrt.locking(self._fp.fileno(), msvcrt.LK_UNLCK, 1)
self._fp.close()
self._fp = None
def acquire(self):
elapsed = 0
while elapsed < self.timeout:
try:
return self._lock()
except LockFileExists:
sleep(self.delay)
elapsed += self.delay
raise exception.LockFileTimeoutError()
def release(self):
self._unlock()
if exists(self._lock_path):
try:
remove(self._lock_path)
except: # pylint: disable=bare-except
pass
def __enter__(self):
self.acquire()
def __exit__(self, type_, value, traceback):
self.release()
def __del__(self):
self.release()
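A minimal usage sketch for the new LockFile helper; the guarded directory path below is illustrative:

from platformio.lockfile import LockFile

packages_dir = "/path/to/.platformio/packages"
with LockFile(packages_dir):
    # Only one process at a time proceeds past this point. Concurrent
    # callers retry every LOCKFILE_DELAY seconds and raise
    # exception.LockFileTimeoutError once LOCKFILE_TIMEOUT is exceeded.
    pass  # install / uninstall packages here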

View File

@@ -66,11 +66,12 @@ def on_platformio_exception(e):
def in_silence(ctx=None):
ctx = ctx or app.get_session_var("command_ctx")
assert ctx
ctx_args = ctx.args or []
return ctx_args and any([
ctx.args[0] == "upgrade", "--json-output" in ctx_args,
"--version" in ctx_args
if not ctx:
return True
return ctx.args and any([
ctx.args[0] == "debug" and "--interpreter" in " ".join(ctx.args),
ctx.args[0] == "upgrade", "--json-output" in ctx.args,
"--version" in ctx.args
])
@@ -179,7 +180,7 @@ def after_upgrade(ctx):
click.secho(
"Please remove multiple PIO Cores from a system:", fg="yellow")
click.secho(
"http://docs.platformio.org/page/faq.html"
"https://docs.platformio.org/page/faq.html"
"#multiple-pio-cores-in-a-system",
fg="cyan")
click.secho("*" * terminal_width, fg="yellow")
@@ -208,8 +209,8 @@ def after_upgrade(ctx):
# PlatformIO banner
click.echo("*" * terminal_width)
click.echo("If you like %s, please:" %
(click.style("PlatformIO", fg="cyan")))
click.echo(
"If you like %s, please:" % (click.style("PlatformIO", fg="cyan")))
click.echo("- %s us on Twitter to stay up-to-date "
"on the latest project news > %s" %
(click.style("follow", fg="cyan"),
@@ -222,11 +223,11 @@ def after_upgrade(ctx):
click.echo(
"- %s PlatformIO IDE for IoT development > %s" %
(click.style("try", fg="cyan"),
click.style("http://platformio.org/platformio-ide", fg="cyan")))
click.style("https://platformio.org/platformio-ide", fg="cyan")))
if not util.is_ci():
click.echo("- %s us with PlatformIO Plus > %s" %
(click.style("support", fg="cyan"),
click.style("https://pioplus.com", fg="cyan")))
click.echo("- %s us with PlatformIO Plus > %s" % (click.style(
"support", fg="cyan"), click.style(
"https://pioplus.com", fg="cyan")))
click.echo("*" * terminal_width)
click.echo("")
@@ -274,7 +275,8 @@ def check_platformio_upgrade():
click.secho("pip install -U platformio", fg="cyan", nl=False)
click.secho("` command.", fg="yellow")
click.secho("Changes: ", fg="yellow", nl=False)
click.secho("http://docs.platformio.org/en/latest/history.html", fg="cyan")
click.secho(
"https://docs.platformio.org/en/latest/history.html", fg="cyan")
click.echo("*" * terminal_width)
click.echo("")
@@ -296,8 +298,8 @@ def check_internal_updates(ctx, what):
if manifest['name'] in outdated_items:
continue
conds = [
pm.outdated(manifest['__pkg_dir']),
what == "platforms" and PlatformFactory.newPlatform(
pm.outdated(manifest['__pkg_dir']), what == "platforms"
and PlatformFactory.newPlatform(
manifest['__pkg_dir']).are_outdated_packages()
]
if any(conds):

View File

@@ -16,16 +16,19 @@ import os
import subprocess
import sys
from os.path import dirname, join
from time import sleep
import requests
from platformio import __version__, exception, util
from platformio.managers.package import PackageManager
CORE_PACKAGES = {
"contrib-piohome": ">=0.6.0,<2",
"contrib-pysite": ">=0.1.2,<2",
"tool-pioplus": ">=0.12.1,<2",
"tool-unity": "~1.20302.1",
"tool-scons": "~3.20501.2"
"contrib-piohome": "^2.0.0",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info[0], sys.version_info[1]),
"tool-pioplus": "^2.0.0",
"tool-unity": "~1.20403.0",
"tool-scons": "~2.20501.7"
}
PIOPLUS_AUTO_UPDATES_MAX = 100
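For clarity, the dynamic contrib-pysite requirement above resolves per interpreter; a small worked illustration:

import sys

req = "~2.%d%d.0" % (sys.version_info[0], sys.version_info[1])
# Python 2.7 -> "~2.27.0"; Python 3.6 -> "~2.36.0"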
@@ -36,11 +39,12 @@ PIOPLUS_AUTO_UPDATES_MAX = 100
class CorePackageManager(PackageManager):
def __init__(self):
PackageManager.__init__(self, join(util.get_home_dir(), "packages"), [
"https://dl.bintray.com/platformio/dl-packages/manifest.json",
"http%s://dl.platformio.org/packages/manifest.json" %
("" if sys.version_info < (2, 7, 9) else "s")
])
super(CorePackageManager, self).__init__(
join(util.get_home_dir(), "packages"), [
"https://dl.bintray.com/platformio/dl-packages/manifest.json",
"http%s://dl.platformio.org/packages/manifest.json" %
("" if sys.version_info < (2, 7, 9) else "s")
])
def install( # pylint: disable=keyword-arg-before-vararg
self,
@@ -69,7 +73,7 @@ class CorePackageManager(PackageManager):
if manifest['name'] not in best_pkg_versions:
continue
if manifest['version'] != best_pkg_versions[manifest['name']]:
self.uninstall(manifest['__pkg_dir'], trigger_event=False)
self.uninstall(manifest['__pkg_dir'], after_update=True)
self.cache_reset()
return True
@@ -92,22 +96,37 @@ def update_core_packages(only_check=False, silent=False):
if not pkg_dir:
continue
if not silent or pm.outdated(pkg_dir, requirements):
if name == "tool-pioplus" and not only_check:
shutdown_piohome_servers()
if "windows" in util.get_systype():
sleep(1)
pm.update(name, requirements, only_check=only_check)
return True
def shutdown_piohome_servers():
port = 8010
while port < 8050:
try:
requests.get(
"http://127.0.0.1:%d?__shutdown__=1" % port, timeout=0.01)
except: # pylint: disable=bare-except
pass
port += 1
def pioplus_call(args, **kwargs):
if "windows" in util.get_systype() and sys.version_info < (2, 7, 6):
raise exception.PlatformioException(
"PlatformIO Core Plus v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7.6, please upgrade Python.\n"
"Python 3 is not yet supported.\n" % (__version__,
sys.version.split()[0]))
"Python 3 is not yet supported.\n" % (__version__, sys.version))
pioplus_path = join(get_core_package_dir("tool-pioplus"), "pioplus")
pythonexe_path = util.get_pythonexe_path()
os.environ['PYTHONEXEPATH'] = pythonexe_path
os.environ['PYTHONPYSITEDIR'] = get_core_package_dir("contrib-pysite")
os.environ['PIOCOREPYSITEDIR'] = dirname(util.get_source_dir() or "")
os.environ['PATH'] = (os.pathsep).join(
[dirname(pythonexe_path), os.environ['PATH']])
util.copy_pythonpath_to_osenv()

View File

@@ -13,17 +13,18 @@
# limitations under the License.
# pylint: disable=too-many-arguments, too-many-locals, too-many-branches
# pylint: disable=too-many-return-statements
import json
import re
from glob import glob
from os.path import isdir, join
import arrow
import click
from platformio import app, commands, exception, util
from platformio.managers.package import BasePkgManager
from platformio.managers.platform import PlatformFactory, PlatformManager
class LibraryManager(BasePkgManager):
@@ -31,7 +32,7 @@ class LibraryManager(BasePkgManager):
def __init__(self, package_dir=None):
if not package_dir:
package_dir = join(util.get_home_dir(), "lib")
BasePkgManager.__init__(self, package_dir)
super(LibraryManager, self).__init__(package_dir)
@property
def manifest_names(self):
@@ -155,8 +156,8 @@ class LibraryManager(BasePkgManager):
def max_satisfying_repo_version(self, versions, requirements=None):
def _cmp_dates(datestr1, datestr2):
date1 = arrow.get(datestr1)
date2 = arrow.get(datestr2)
date1 = util.parse_date(datestr1)
date2 = util.parse_date(datestr2)
if date1 == date2:
return 0
return -1 if date1 < date2 else 1
@@ -186,29 +187,15 @@ class LibraryManager(BasePkgManager):
def get_latest_repo_version(self, name, requirements, silent=False):
item = self.max_satisfying_repo_version(
util.get_api_result(
"/lib/info/%d" % self.get_pkg_id_by_name(
name, requirements, silent=silent),
"/lib/info/%d" % self.search_lib_id(
{
"name": name,
"requirements": requirements
},
silent=silent),
cache_valid="1h")['versions'], requirements)
return item['name'] if item else None
def get_pkg_id_by_name(self,
name,
requirements,
silent=False,
interactive=False):
if name.startswith("id="):
return int(name[3:])
# try to find ID from installed packages
package_dir = self.get_package_dir(name, requirements)
if package_dir:
manifest = self.load_manifest(package_dir)
if "id" in manifest:
return int(manifest['id'])
return int(
self.search_for_library({
"name": name
}, silent, interactive)['id'])
def _install_from_piorepo(self, name, requirements):
assert name.startswith("id="), name
version = self.get_latest_repo_version(name, requirements)
@@ -221,92 +208,24 @@ class LibraryManager(BasePkgManager):
cache_valid="30d")
assert dl_data
return self._install_from_url(name, dl_data['url'].replace(
"http://", "https://") if app.get_setting("enable_ssl") else
dl_data['url'], requirements)
return self._install_from_url(
name, dl_data['url'].replace("http://", "https://")
if app.get_setting("enable_ssl") else dl_data['url'], requirements)
def install( # pylint: disable=arguments-differ
def search_lib_id( # pylint: disable=too-many-branches
self,
name,
requirements=None,
silent=False,
trigger_event=True,
interactive=False,
force=False):
pkg_dir = None
try:
_name, _requirements, _url = self.parse_pkg_uri(name, requirements)
if not _url:
name = "id=%d" % self.get_pkg_id_by_name(
_name,
_requirements,
silent=silent,
interactive=interactive)
requirements = _requirements
pkg_dir = BasePkgManager.install(
self,
name,
requirements,
silent=silent,
trigger_event=trigger_event,
force=force)
except exception.InternetIsOffline as e:
if not silent:
click.secho(str(e), fg="yellow")
return None
if not pkg_dir:
return None
manifest = self.load_manifest(pkg_dir)
if "dependencies" not in manifest:
return pkg_dir
if not silent:
click.secho("Installing dependencies", fg="yellow")
for filters in self.normalize_dependencies(manifest['dependencies']):
assert "name" in filters
if any([s in filters.get("version", "") for s in ("\\", "/")]):
self.install(
"{name}={version}".format(**filters),
silent=silent,
trigger_event=trigger_event,
interactive=interactive,
force=force)
else:
try:
lib_info = self.search_for_library(filters, silent,
interactive)
except exception.LibNotFound as e:
if not silent:
click.secho("Warning! %s" % e, fg="yellow")
continue
if filters.get("version"):
self.install(
lib_info['id'],
filters.get("version"),
silent=silent,
trigger_event=trigger_event,
interactive=interactive,
force=force)
else:
self.install(
lib_info['id'],
silent=silent,
trigger_event=trigger_event,
interactive=interactive,
force=force)
return pkg_dir
@staticmethod
def search_for_library( # pylint: disable=too-many-branches
filters,
silent=False,
interactive=False):
assert isinstance(filters, dict)
assert "name" in filters
# try to find ID within installed packages
lib_id = self._get_lib_id_from_installed(filters)
if lib_id:
return lib_id
# looking in PIO Library Registry
if not silent:
click.echo("Looking for %s library in registry" % click.style(
filters['name'], fg="cyan"))
@@ -318,9 +237,8 @@ class LibraryManager(BasePkgManager):
if not isinstance(values, list):
values = [v.strip() for v in values.split(",") if v]
for value in values:
query.append('%s:"%s"' % (key[:-1]
if key.endswith("s") else key,
value))
query.append('%s:"%s"' %
(key[:-1] if key.endswith("s") else key, value))
lib_info = None
result = util.get_api_result(
@@ -363,7 +281,152 @@ class LibraryManager(BasePkgManager):
raise exception.LibNotFound(str(filters))
if not silent:
click.echo("Found: %s" % click.style(
"http://platformio.org/lib/show/{id}/{name}".format(
"https://platformio.org/lib/show/{id}/{name}".format(
**lib_info),
fg="blue"))
return lib_info
return int(lib_info['id'])
def _get_lib_id_from_installed(self, filters):
if filters['name'].startswith("id="):
return int(filters['name'][3:])
package_dir = self.get_package_dir(
filters['name'], filters.get("requirements",
filters.get("version")))
if not package_dir:
return None
manifest = self.load_manifest(package_dir)
if "id" not in manifest:
return None
for key in ("frameworks", "platforms"):
if key not in filters:
continue
if key not in manifest:
return None
if not util.items_in_list(
util.items_to_list(filters[key]),
util.items_to_list(manifest[key])):
return None
if "authors" in filters:
if "authors" not in manifest:
return None
manifest_authors = manifest['authors']
if not isinstance(manifest_authors, list):
manifest_authors = [manifest_authors]
manifest_authors = [
a['name'] for a in manifest_authors
if isinstance(a, dict) and "name" in a
]
filter_authors = filters['authors']
if not isinstance(filter_authors, list):
filter_authors = [filter_authors]
if not set(filter_authors) <= set(manifest_authors):
return None
return int(manifest['id'])
def install( # pylint: disable=arguments-differ
self,
name,
requirements=None,
silent=False,
after_update=False,
interactive=False,
force=False):
_name, _requirements, _url = self.parse_pkg_uri(name, requirements)
if not _url:
name = "id=%d" % self.search_lib_id({
"name": _name,
"requirements": _requirements
},
silent=silent,
interactive=interactive)
requirements = _requirements
pkg_dir = BasePkgManager.install(
self,
name,
requirements,
silent=silent,
after_update=after_update,
force=force)
if not pkg_dir:
return None
manifest = self.load_manifest(pkg_dir)
if "dependencies" not in manifest:
return pkg_dir
if not silent:
click.secho("Installing dependencies", fg="yellow")
for filters in self.normalize_dependencies(manifest['dependencies']):
assert "name" in filters
# avoid circle dependencies
if not self.INSTALL_HISTORY:
self.INSTALL_HISTORY = []
history_key = str(filters)
if history_key in self.INSTALL_HISTORY:
continue
self.INSTALL_HISTORY.append(history_key)
if any(s in filters.get("version", "") for s in ("\\", "/")):
self.install(
"{name}={version}".format(**filters),
silent=silent,
after_update=after_update,
interactive=interactive,
force=force)
else:
try:
lib_id = self.search_lib_id(filters, silent, interactive)
except exception.LibNotFound as e:
if not silent or is_builtin_lib(filters['name']):
click.secho("Warning! %s" % e, fg="yellow")
continue
if filters.get("version"):
self.install(
lib_id,
filters.get("version"),
silent=silent,
after_update=after_update,
interactive=interactive,
force=force)
else:
self.install(
lib_id,
silent=silent,
after_update=after_update,
interactive=interactive,
force=force)
return pkg_dir
@util.memoized()
def get_builtin_libs(storage_names=None):
items = []
storage_names = storage_names or []
pm = PlatformManager()
for manifest in pm.get_installed():
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
for storage in p.get_lib_storages():
if storage_names and storage['name'] not in storage_names:
continue
lm = LibraryManager(storage['path'])
items.append({
"name": storage['name'],
"path": storage['path'],
"items": lm.get_installed()
})
return items
@util.memoized()
def is_builtin_lib(name):
for storage in get_builtin_libs():
if any(l.get("name") == name for l in storage['items']):
return True
return False
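A hedged usage sketch for the two memoized helpers above, assuming they are importable from platformio.managers.lib (the library name "SPI" is only illustrative):

from platformio.managers.lib import get_builtin_libs, is_builtin_lib

for storage in get_builtin_libs():        # one entry per platform lib storage
    print(storage["name"], len(storage["items"]), "built-in libraries")

if is_builtin_lib("SPI"):
    print("'SPI' ships with an installed platform; no registry install needed")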

View File

@@ -18,7 +18,7 @@ import json
import os
import re
import shutil
from os.path import basename, getsize, isdir, isfile, islink, join
from os.path import abspath, basename, getsize, isdir, isfile, islink, join
from tempfile import mkdtemp
import click
@@ -27,6 +27,7 @@ import semantic_version
from platformio import __version__, app, exception, telemetry, util
from platformio.downloader import FileDownloader
from platformio.lockfile import LockFile
from platformio.unpacker import FileUnpacker
from platformio.vcsclient import VCSClientFactory
@@ -90,7 +91,8 @@ class PkgRepoMixin(object):
reqspec = None
if requirements:
try:
reqspec = semantic_version.Spec(requirements)
reqspec = self.parse_semver_spec(
requirements, raise_exception=True)
except ValueError:
pass
@@ -98,8 +100,8 @@ class PkgRepoMixin(object):
if not self.is_system_compatible(v.get("system")):
continue
if "platformio" in v.get("engines", {}):
if PkgRepoMixin.PIO_VERSION not in semantic_version.Spec(
v['engines']['platformio']):
if PkgRepoMixin.PIO_VERSION not in self.parse_semver_spec(
v['engines']['platformio'], raise_exception=True):
continue
specver = semantic_version.Version(v['version'])
if reqspec and specver not in reqspec:
@@ -134,6 +136,7 @@ class PkgRepoMixin(object):
class PkgInstallerMixin(object):
SRC_MANIFEST_NAME = ".piopkgmanager.json"
TMP_FOLDER_PREFIX = "_tmp_installing-"
FILE_CACHE_VALID = "1m" # 1 month
FILE_CACHE_MAX_SIZE = 1024 * 1024
@@ -176,8 +179,25 @@ class PkgInstallerMixin(object):
shutil.copy(cache_path, dst_path)
return dst_path
fd = FileDownloader(url, dest_dir)
fd.start()
with_progress = not app.is_disabled_progressbar()
try:
fd = FileDownloader(url, dest_dir)
fd.start(with_progress=with_progress)
except IOError as e:
raise_error = not with_progress
if with_progress:
try:
fd = FileDownloader(url, dest_dir)
fd.start(with_progress=False)
except IOError:
raise_error = True
if raise_error:
click.secho(
"Error: Please read http://bit.ly/package-manager-ioerror",
fg="red",
err=True)
raise e
if sha1:
fd.verify(sha1)
dst_path = fd.get_filepath()
@@ -193,13 +213,33 @@ class PkgInstallerMixin(object):
@staticmethod
def unpack(source_path, dest_dir):
with FileUnpacker(source_path) as fu:
return fu.unpack(dest_dir)
with_progress = not app.is_disabled_progressbar()
try:
with FileUnpacker(source_path) as fu:
return fu.unpack(dest_dir, with_progress=with_progress)
except IOError as e:
if not with_progress:
raise e
with FileUnpacker(source_path) as fu:
return fu.unpack(dest_dir, with_progress=False)
@staticmethod
def parse_semver_spec(value, raise_exception=False):
try:
return semantic_version.Spec(value)
# Workaround for ^ issue and pre-releases
# https://github.com/rbarrois/python-semanticversion/issues/61
requirements = []
for item in str(value).split(","):
item = item.strip()
if not item:
continue
if item.startswith("^"):
major = semantic_version.Version.coerce(item[1:]).major
requirements.append(">=%s" % major)
requirements.append("<%s" % (int(major) + 1))
else:
requirements.append(item)
return semantic_version.Spec(*requirements)
except ValueError as e:
if raise_exception:
raise e
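A worked example of the caret workaround above: python-semanticversion cannot parse "^X.Y.Z" directly, so the requirement is expanded into a major-version range before being handed to Spec.

import semantic_version

item = "^2.0.3"
major = semantic_version.Version.coerce(item[1:]).major          # 2
spec = semantic_version.Spec(">=%s" % major, "<%s" % (int(major) + 1))
# behaves like the requirement pair ">=2,<3"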
@@ -211,12 +251,77 @@ class PkgInstallerMixin(object):
try:
return semantic_version.Version(value)
except ValueError:
if "." not in str(value) and not str(value).isdigit():
raise ValueError("Invalid SemVer version %s" % value)
return semantic_version.Version.coerce(value)
except ValueError as e:
if raise_exception:
raise e
return None
@staticmethod
def parse_pkg_uri( # pylint: disable=too-many-branches
text, requirements=None):
text = str(text)
name, url = None, None
# Parse requirements
req_conditions = [
"@" in text, not requirements, ":" not in text
or text.rfind("/") < text.rfind("@")
]
if all(req_conditions):
text, requirements = text.rsplit("@", 1)
# Handle PIO Library Registry ID
if text.isdigit():
text = "id=" + text
# Parse custom name
elif "=" in text and not text.startswith("id="):
name, text = text.split("=", 1)
# Parse URL
# if valid URL with scheme vcs+protocol://
if "+" in text and text.find("+") < text.find("://"):
url = text
elif "/" in text or "\\" in text:
git_conditions = [
# Handle GitHub URL (https://github.com/user/package)
text.startswith("https://github.com/") and not text.endswith(
(".zip", ".tar.gz")),
(text.split("#", 1)[0]
if "#" in text else text).endswith(".git")
]
hg_conditions = [
# Handle Developer Mbed URL
# (https://developer.mbed.org/users/user/code/package/)
# (https://os.mbed.com/users/user/code/package/)
text.startswith("https://developer.mbed.org"),
text.startswith("https://os.mbed.com")
]
if any(git_conditions):
url = "git+" + text
elif any(hg_conditions):
url = "hg+" + text
elif "://" not in text and (isfile(text) or isdir(text)):
url = "file://" + text
elif "://" in text:
url = text
# Handle short version of GitHub URL
elif text.count("/") == 1:
url = "git+https://github.com/" + text
# Parse name from URL
if url and not name:
_url = url.split("#", 1)[0] if "#" in url else url
if _url.endswith(("\\", "/")):
_url = _url[:-1]
name = basename(_url)
if "." in name and not name.startswith("."):
name = name.rsplit(".", 1)[0]
return (name or text, requirements, url)
@staticmethod
def get_install_dirname(manifest):
name = re.sub(r"[^\da-z\_\-\. ]", "_", manifest['name'], flags=re.I)
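Hedged examples of how parse_pkg_uri (shown above) splits its input; the expected tuples are read off the branches in the diff, not produced by running the code:

# parse_pkg_uri("ArduinoJson@^5.13.0")
#   -> ("ArduinoJson", "^5.13.0", None)
# parse_pkg_uri("1655")
#   -> ("id=1655", None, None)          # bare registry ID
# parse_pkg_uri("https://github.com/bblanchon/ArduinoJson.git")
#   -> ("ArduinoJson", None,
#       "git+https://github.com/bblanchon/ArduinoJson.git")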
@@ -274,11 +379,13 @@ class PkgInstallerMixin(object):
manifest[key.strip()] = value.strip()
if src_manifest:
if "name" not in manifest:
manifest['name'] = src_manifest['name']
if "version" in src_manifest:
manifest['version'] = src_manifest['version']
manifest['__src_url'] = src_manifest['url']
# handle a custom package name
autogen_name = self.parse_pkg_uri(manifest['__src_url'])[0]
if "name" not in manifest or autogen_name != src_manifest['name']:
manifest['name'] = src_manifest['name']
if "name" not in manifest:
manifest['name'] = basename(pkg_dir)
@@ -292,6 +399,8 @@ class PkgInstallerMixin(object):
def get_installed(self):
items = []
for pkg_dir in self.read_dirs(self.package_dir):
if self.TMP_FOLDER_PREFIX in pkg_dir:
continue
manifest = self.load_manifest(pkg_dir)
if not manifest:
continue
@@ -338,6 +447,12 @@ class PkgInstallerMixin(object):
return manifest.get("__pkg_dir") if manifest and isdir(
manifest.get("__pkg_dir")) else None
def get_package_by_dir(self, pkg_dir):
for manifest in self.get_installed():
if manifest['__pkg_dir'] == util.path_to_unicode(abspath(pkg_dir)):
return manifest
return None
def find_pkg_root(self, src_dir):
if self.manifest_exists(src_dir):
return src_dir
@@ -361,9 +476,10 @@ class PkgInstallerMixin(object):
break
except Exception as e: # pylint: disable=broad-except
click.secho("Warning! Package Mirror: %s" % e, fg="yellow")
click.secho("Looking for other mirror...", fg="yellow")
click.secho("Looking for another mirror...", fg="yellow")
if versions is None:
util.internet_on(raise_exception=True)
raise exception.UnknownPackage(name)
elif not pkgdata:
raise exception.UndefinedPackageVersion(requirements or "latest",
@@ -376,7 +492,7 @@ class PkgInstallerMixin(object):
requirements=None,
sha1=None,
track=False):
tmp_dir = mkdtemp("-package", "_tmp_installing-", self.package_dir)
tmp_dir = mkdtemp("-package", self.TMP_FOLDER_PREFIX, self.package_dir)
src_manifest_dir = None
src_manifest = {"name": name, "url": url, "requirements": requirements}
@@ -445,8 +561,8 @@ class PkgInstallerMixin(object):
"Package version %s doesn't satisfy requirements %s" %
(tmp_manifest['version'], requirements))
try:
assert tmp_semver and tmp_semver in semantic_version.Spec(
requirements), mismatch_error
assert tmp_semver and tmp_semver in self.parse_semver_spec(
requirements, raise_exception=True), mismatch_error
except (AssertionError, ValueError):
assert tmp_manifest['version'] == requirements, mismatch_error
@@ -471,23 +587,23 @@ class PkgInstallerMixin(object):
cur_manifest['version'])
if "__src_url" in cur_manifest:
target_dirname = "%s@src-%s" % (
pkg_dirname,
hashlib.md5(cur_manifest['__src_url']).hexdigest())
os.rename(pkg_dir, join(self.package_dir, target_dirname))
pkg_dirname, hashlib.md5(
cur_manifest['__src_url']).hexdigest())
shutil.move(pkg_dir, join(self.package_dir, target_dirname))
# fix to a version
elif action == 2:
target_dirname = "%s@%s" % (pkg_dirname,
tmp_manifest['version'])
if "__src_url" in tmp_manifest:
target_dirname = "%s@src-%s" % (
pkg_dirname,
hashlib.md5(tmp_manifest['__src_url']).hexdigest())
pkg_dirname, hashlib.md5(
tmp_manifest['__src_url']).hexdigest())
pkg_dir = join(self.package_dir, target_dirname)
# remove previous/not-satisfied package
if isdir(pkg_dir):
util.rmtree_(pkg_dir)
os.rename(tmp_dir, pkg_dir)
shutil.move(tmp_dir, pkg_dir)
assert isdir(pkg_dir)
self.cache_reset()
return pkg_dir
@@ -512,71 +628,10 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
def print_message(self, message, nl=True):
click.echo("%s: %s" % (self.__class__.__name__, message), nl=nl)
@staticmethod
def parse_pkg_uri( # pylint: disable=too-many-branches
text, requirements=None):
text = str(text)
name, url = None, None
# Parse requirements
req_conditions = [
"@" in text, not requirements, ":" not in text
or text.rfind("/") < text.rfind("@")
]
if all(req_conditions):
text, requirements = text.rsplit("@", 1)
# Handle PIO Library Registry ID
if text.isdigit():
text = "id=" + text
# Parse custom name
elif "=" in text and not text.startswith("id="):
name, text = text.split("=", 1)
# Parse URL
# if valid URL with scheme vcs+protocol://
if "+" in text and text.find("+") < text.find("://"):
url = text
elif "/" in text or "\\" in text:
git_conditions = [
# Handle GitHub URL (https://github.com/user/package)
text.startswith("https://github.com/") and not text.endswith(
(".zip", ".tar.gz")),
(text.split("#", 1)[0]
if "#" in text else text).endswith(".git")
]
hg_conditions = [
# Handle Developer Mbed URL
# (https://developer.mbed.org/users/user/code/package/)
text.startswith("https://developer.mbed.org")
]
if any(git_conditions):
url = "git+" + text
elif any(hg_conditions):
url = "hg+" + text
elif "://" not in text and (isfile(text) or isdir(text)):
url = "file://" + text
elif "://" in text:
url = text
# Handle short version of GitHub URL
elif text.count("/") == 1:
url = "git+https://github.com/" + text
# Parse name from URL
if url and not name:
_url = url.split("#", 1)[0] if "#" in url else url
if _url.endswith(("\\", "/")):
_url = _url[:-1]
name = basename(_url)
if "." in name and not name.startswith("."):
name = name.rsplit(".", 1)[0]
return (name or text, requirements, url)
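For orientation, a few illustrative calls to parse_pkg_uri (the staticmethod removed here from BasePkgManager and relocated earlier in this diff); it stays reachable through PackageManager, and the sample package names below are invented:

from platformio.managers.package import PackageManager

print(PackageManager.parse_pkg_uri("ArduinoJson@5.13.4"))
# -> ("ArduinoJson", "5.13.4", None)
print(PackageManager.parse_pkg_uri("64"))
# -> ("id=64", None, None)
print(PackageManager.parse_pkg_uri("user/repo"))
# -> ("repo", None, "git+https://github.com/user/repo")
print(PackageManager.parse_pkg_uri("https://github.com/user/repo.git"))
# -> ("repo", None, "git+https://github.com/user/repo.git")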
def outdated(self, pkg_dir, requirements=None):
"""
Has 3 different results:
`None` - unknown package, VCS is fixed to commit
`None` - unknown package, VCS is detached to commit
`False` - package is up-to-date
`String` - a found latest version
"""
@@ -584,7 +639,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
return None
latest = None
manifest = self.load_manifest(pkg_dir)
# skip fixed package to a specific version
# skip detached package to a specific version
if "@" in pkg_dir and "__src_url" not in manifest and not requirements:
return None
@@ -626,111 +681,123 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
name,
requirements=None,
silent=False,
trigger_event=True,
after_update=False,
force=False):
name, requirements, url = self.parse_pkg_uri(name, requirements)
package_dir = self.get_package_dir(name, requirements, url)
pkg_dir = None
# interprocess lock
with LockFile(self.package_dir):
self.cache_reset()
# avoid circular dependencies
if not self.INSTALL_HISTORY:
self.INSTALL_HISTORY = []
history_key = "%s-%s-%s" % (name, requirements or "", url or "")
if history_key in self.INSTALL_HISTORY:
return package_dir
self.INSTALL_HISTORY.append(history_key)
name, requirements, url = self.parse_pkg_uri(name, requirements)
package_dir = self.get_package_dir(name, requirements, url)
if package_dir and force:
self.uninstall(package_dir)
package_dir = None
# avoid circular dependencies
if not self.INSTALL_HISTORY:
self.INSTALL_HISTORY = []
history_key = "%s-%s-%s" % (name, requirements or "", url or "")
if history_key in self.INSTALL_HISTORY:
return package_dir
self.INSTALL_HISTORY.append(history_key)
if package_dir and force:
self.uninstall(package_dir)
package_dir = None
if not package_dir or not silent:
msg = "Installing " + click.style(name, fg="cyan")
if requirements:
msg += " @ " + requirements
self.print_message(msg)
if package_dir:
if not silent:
click.secho(
"{name} @ {version} is already installed".format(
**self.load_manifest(package_dir)),
fg="yellow")
return package_dir
if url:
pkg_dir = self._install_from_url(
name, url, requirements, track=True)
else:
pkg_dir = self._install_from_piorepo(name, requirements)
if not pkg_dir or not self.manifest_exists(pkg_dir):
raise exception.PackageInstallError(name, requirements or "*",
util.get_systype())
manifest = self.load_manifest(pkg_dir)
assert manifest
if not after_update:
telemetry.on_event(
category=self.__class__.__name__,
action="Install",
label=manifest['name'])
if not package_dir or not silent:
msg = "Installing " + click.style(name, fg="cyan")
if requirements:
msg += " @ " + requirements
self.print_message(msg)
if package_dir:
if not silent:
click.secho(
"{name} @ {version} is already installed".format(
**self.load_manifest(package_dir)),
fg="yellow")
return package_dir
if url:
pkg_dir = self._install_from_url(
name, url, requirements, track=True)
else:
pkg_dir = self._install_from_piorepo(name, requirements)
if not pkg_dir or not self.manifest_exists(pkg_dir):
raise exception.PackageInstallError(name, requirements or "*",
util.get_systype())
manifest = self.load_manifest(pkg_dir)
assert manifest
if trigger_event:
telemetry.on_event(
category=self.__class__.__name__,
action="Install",
label=manifest['name'])
if not silent:
click.secho(
"{name} @ {version} has been successfully installed!".format(
**manifest),
fg="green")
"{name} @ {version} has been successfully installed!".
format(**manifest),
fg="green")
return pkg_dir
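The history key is simply name + requirements + url, so repeated install() calls for the same spec inside one process short-circuit instead of recursing; a rough sketch of the guard:

INSTALL_HISTORY = []

def seen_before(name, requirements=None, url=None):
    key = "%s-%s-%s" % (name, requirements or "", url or "")
    if key in INSTALL_HISTORY:
        return True   # already handled in this run
    INSTALL_HISTORY.append(key)
    return False

print(seen_before("ArduinoJson", "5.13.4"))  # -> False (first request)
print(seen_before("ArduinoJson", "5.13.4"))  # -> True  (circular-dependency guard)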
def uninstall(self, package, requirements=None, trigger_event=True):
if isdir(package):
pkg_dir = package
else:
name, requirements, url = self.parse_pkg_uri(package, requirements)
pkg_dir = self.get_package_dir(name, requirements, url)
if not pkg_dir:
raise exception.UnknownPackage("%s @ %s" % (package,
requirements or "*"))
manifest = self.load_manifest(pkg_dir)
click.echo(
"Uninstalling %s @ %s: \t" %
(click.style(manifest['name'], fg="cyan"), manifest['version']),
nl=False)
if islink(pkg_dir):
os.unlink(pkg_dir)
else:
util.rmtree_(pkg_dir)
self.cache_reset()
# unfix package with the same name
pkg_dir = self.get_package_dir(manifest['name'])
if pkg_dir and "@" in pkg_dir:
os.rename(pkg_dir,
join(self.package_dir,
self.get_install_dirname(manifest)))
def uninstall(self, package, requirements=None, after_update=False):
# interprocess lock
with LockFile(self.package_dir):
self.cache_reset()
click.echo("[%s]" % click.style("OK", fg="green"))
if isdir(package) and self.get_package_by_dir(package):
pkg_dir = package
else:
name, requirements, url = self.parse_pkg_uri(
package, requirements)
pkg_dir = self.get_package_dir(name, requirements, url)
if not pkg_dir:
raise exception.UnknownPackage(
"%s @ %s" % (package, requirements or "*"))
manifest = self.load_manifest(pkg_dir)
click.echo(
"Uninstalling %s @ %s: \t" % (click.style(
manifest['name'], fg="cyan"), manifest['version']),
nl=False)
if islink(pkg_dir):
os.unlink(pkg_dir)
else:
util.rmtree_(pkg_dir)
self.cache_reset()
# unfix package with the same name
pkg_dir = self.get_package_dir(manifest['name'])
if pkg_dir and "@" in pkg_dir:
shutil.move(
pkg_dir,
join(self.package_dir, self.get_install_dirname(manifest)))
self.cache_reset()
click.echo("[%s]" % click.style("OK", fg="green"))
if not after_update:
telemetry.on_event(
category=self.__class__.__name__,
action="Uninstall",
label=manifest['name'])
if trigger_event:
telemetry.on_event(
category=self.__class__.__name__,
action="Uninstall",
label=manifest['name'])
return True
def update(self, package, requirements=None, only_check=False):
if isdir(package):
if isdir(package) and self.get_package_by_dir(package):
pkg_dir = package
else:
pkg_dir = self.get_package_dir(*self.parse_pkg_uri(package))
if not pkg_dir:
raise exception.UnknownPackage("%s @ %s" % (package,
requirements or "*"))
raise exception.UnknownPackage(
"%s @ %s" % (package, requirements or "*"))
manifest = self.load_manifest(pkg_dir)
name = manifest['name']
@@ -750,7 +817,7 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
elif latest is False:
click.echo("[%s]" % (click.style("Up-to-date", fg="green")))
else:
click.echo("[%s]" % (click.style("Fixed", fg="yellow")))
click.echo("[%s]" % (click.style("Detached", fg="yellow")))
if only_check or not latest:
return True
@@ -761,8 +828,8 @@ class BasePkgManager(PkgRepoMixin, PkgInstallerMixin):
self._update_src_manifest(
dict(version=vcs.get_current_revision()), vcs.storage_dir)
else:
self.uninstall(pkg_dir, trigger_event=False)
self.install(name, latest, trigger_event=False)
self.uninstall(pkg_dir, after_update=True)
self.install(name, latest, after_update=True)
telemetry.on_event(
category=self.__class__.__name__,


@@ -30,7 +30,7 @@ from platformio.managers.package import BasePkgManager, PackageManager
class PlatformManager(BasePkgManager):
FILE_CACHE_VALID = None # disable platform caching
FILE_CACHE_VALID = None # disable platform download caching
def __init__(self, package_dir=None, repositories=None):
if not repositories:
@@ -39,9 +39,9 @@ class PlatformManager(BasePkgManager):
"{0}://dl.platformio.org/platforms/manifest.json".format(
"https" if app.get_setting("enable_ssl") else "http")
]
BasePkgManager.__init__(self, package_dir
or join(util.get_home_dir(), "platforms"),
repositories)
BasePkgManager.__init__(
self, package_dir or join(util.get_home_dir(), "platforms"),
repositories)
@property
def manifest_names(self):
@@ -62,7 +62,7 @@ class PlatformManager(BasePkgManager):
with_packages=None,
without_packages=None,
skip_default_package=False,
trigger_event=True,
after_update=False,
silent=False,
force=False,
**_): # pylint: disable=too-many-arguments, arguments-differ
@@ -70,20 +70,20 @@ class PlatformManager(BasePkgManager):
self, name, requirements, silent=silent, force=force)
p = PlatformFactory.newPlatform(platform_dir)
# @Hook: when 'update' operation (trigger_event is False),
# don't cleanup packages or install them
if not trigger_event:
# don't cleanup packages or install them after update
# we check packages for updates in def update()
if after_update:
return True
p.install_packages(
with_packages,
without_packages,
skip_default_package,
silent=silent,
force=force)
self.cleanup_packages(p.packages.keys())
return True
return self.cleanup_packages(p.packages.keys())
def uninstall(self, package, requirements=None, trigger_event=True):
def uninstall(self, package, requirements=None, after_update=False):
if isdir(package):
pkg_dir = package
else:
@@ -96,13 +96,12 @@ class PlatformManager(BasePkgManager):
p = PlatformFactory.newPlatform(pkg_dir)
BasePkgManager.uninstall(self, pkg_dir, requirements)
# @Hook: when 'update' operation (trigger_event is False),
# don't cleanup packages or install them
if not trigger_event:
# don't cleanup packages or install them after update
# we check packages for updates in def update()
if after_update:
return True
self.cleanup_packages(p.packages.keys())
return True
return self.cleanup_packages(p.packages.keys())
def update( # pylint: disable=arguments-differ
self,
@@ -154,11 +153,15 @@ class PlatformManager(BasePkgManager):
continue
if (manifest['name'] not in deppkgs
or manifest['version'] not in deppkgs[manifest['name']]):
pm.uninstall(manifest['__pkg_dir'], trigger_event=False)
try:
pm.uninstall(manifest['__pkg_dir'], after_update=True)
except exception.UnknownPackage:
pass
self.cache_reset()
return True
@util.memoized(expire=5000)
def get_installed_boards(self):
boards = []
for manifest in self.get_installed():
@@ -170,7 +173,7 @@ class PlatformManager(BasePkgManager):
return boards
@staticmethod
@util.memoized
@util.memoized()
def get_registered_boards():
return util.get_api_result("/boards", cache_valid="7d")
@@ -232,8 +235,8 @@ class PlatformFactory(object):
name = pm.load_manifest(platform_dir)['name']
if not platform_dir:
raise exception.UnknownPlatform(name if not requirements else
"%s@%s" % (name, requirements))
raise exception.UnknownPlatform(
name if not requirements else "%s@%s" % (name, requirements))
platform_cls = None
if isfile(join(platform_dir, "platform.py")):
@@ -280,21 +283,25 @@ class PlatformPackagesMixin(object):
return True
def find_pkg_names(self, items):
def find_pkg_names(self, candidates):
result = []
for item in items:
candidate = item
for candidate in candidates:
found = False
# lookup by package types
for _name, _opts in self.packages.items():
if _opts.get("type") == item:
candidate = _name
if _opts.get("type") == candidate:
result.append(_name)
found = True
if (self.frameworks and item.startswith("framework-")
and item[10:] in self.frameworks):
candidate = self.frameworks[item[10:]]['package']
if (self.frameworks and candidate.startswith("framework-")
and candidate[10:] in self.frameworks):
result.append(self.frameworks[candidate[10:]]['package'])
found = True
if not found:
result.append(candidate)
result.append(candidate)
return result
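The refactored lookup now appends the mapped package names (by package "type" or by framework-* alias) and only falls back to the raw candidate when nothing matched; a self-contained sketch with invented platform data:

packages = {
    "tool-openocd": {"type": "uploader"},
    "toolchain-gccarmnoneeabi": {"type": "toolchain"},
}
frameworks = {"mbed": {"package": "framework-mbed"}}

def find_pkg_names(candidates):
    result = []
    for candidate in candidates:
        found = False
        for _name, _opts in packages.items():
            if _opts.get("type") == candidate:
                result.append(_name)
                found = True
        if candidate.startswith("framework-") and candidate[10:] in frameworks:
            result.append(frameworks[candidate[10:]]["package"])
            found = True
        if not found:
            result.append(candidate)
    return result

print(find_pkg_names(["uploader", "framework-mbed", "some-other-tool"]))
# -> ["tool-openocd", "framework-mbed", "some-other-tool"]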
def update_packages(self, only_check=False):
@@ -324,8 +331,8 @@ class PlatformPackagesMixin(object):
def get_package_dir(self, name):
version = self.packages[name].get("version", "")
if ":" in version:
return self.pm.get_package_dir(*self.pm.parse_pkg_uri(
"%s=%s" % (name, version)))
return self.pm.get_package_dir(
*self.pm.parse_pkg_uri("%s=%s" % (name, version)))
return self.pm.get_package_dir(name, version)
def get_package_version(self, name):
@@ -427,10 +434,10 @@ class PlatformRunMixin(object):
""".format(filename=filename,
filename_styled=click.style(filename, fg="cyan"),
link=click.style(
"http://platformio.org/lib/search?query=header:%s" % quote(
"https://platformio.org/lib/search?query=header:%s" % quote(
filename, safe=""),
fg="blue"),
dots="*" * (55 + len(filename)))
dots="*" * (56 + len(filename)))
click.echo(banner, err=True)
@staticmethod
@@ -453,8 +460,7 @@ class PlatformBase( # pylint: disable=too-many-public-methods
self._manifest = util.load_json(manifest_path)
self.pm = PackageManager(
join(util.get_home_dir(), "packages"),
self._manifest.get("packageRepositories"))
join(util.get_home_dir(), "packages"), self.package_repositories)
self.silent = False
self.verbose = False
@@ -489,6 +495,10 @@ class PlatformBase( # pylint: disable=too-many-public-methods
def vendor_url(self):
return self._manifest.get("url")
@property
def docs_url(self):
return self._manifest.get("docs")
@property
def repository_url(self):
return self._manifest.get("repository", {}).get("url")
@@ -505,6 +515,10 @@ class PlatformBase( # pylint: disable=too-many-public-methods
def engines(self):
return self._manifest.get("engines")
@property
def package_repositories(self):
return self._manifest.get("packageRepositories")
@property
def manifest(self):
return self._manifest
@@ -563,9 +577,9 @@ class PlatformBase( # pylint: disable=too-many-public-methods
if not isdir(boards_dir):
continue
manifest_path = join(boards_dir, "%s.json" % id_)
if not isfile(manifest_path):
continue
_append_board(id_, manifest_path)
if isfile(manifest_path):
_append_board(id_, manifest_path)
break
if id_ not in self._BOARDS_CACHE:
raise exception.UnknownBoard(id_)
return self._BOARDS_CACHE[id_] if id_ else self._BOARDS_CACHE
@@ -593,12 +607,13 @@ class PlatformBase( # pylint: disable=too-many-public-methods
# enable upload tools for upload targets
if any(["upload" in t for t in targets] + ["program" in targets]):
for _name, _opts in self.packages.iteritems():
if _opts.get("type") == "uploader":
self.packages[_name]['optional'] = False
elif "nobuild" in targets:
# skip all packages, allow only upload tools
self.packages[_name]['optional'] = True
for name, opts in self.packages.items():
if opts.get("type") == "uploader":
self.packages[name]['optional'] = False
# skip all packages in "nobuild" mode
# allow only upload tools and frameworks
elif "nobuild" in targets and opts.get("type") != "framework":
self.packages[name]['optional'] = True
def get_lib_storages(self):
storages = []
@@ -618,10 +633,8 @@ class PlatformBase( # pylint: disable=too-many-public-methods
if not isdir(libcore_dir):
continue
storages.append({
"name":
"%s-core-%s" % (opts['package'], item),
"path":
libcore_dir
"name": "%s-core-%s" % (opts['package'], item),
"path": libcore_dir
})
return storages
@@ -654,6 +667,15 @@ class PlatformBoardConfig(object):
else:
raise KeyError("Invalid board option '%s'" % path)
def update(self, path, value):
newdict = None
for key in path.split(".")[::-1]:
if newdict is None:
newdict = {key: value}
else:
newdict = {key: newdict}
util.merge_dicts(self._manifest, newdict)
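The new update() turns a dotted option path into a nested dict before merging it into the board manifest; the path and value below are invented:

def path_to_nested(path, value):
    newdict = None
    for key in path.split(".")[::-1]:
        newdict = {key: value} if newdict is None else {key: newdict}
    return newdict

print(path_to_nested("build.mcu", "atmega328p"))
# -> {"build": {"mcu": "atmega328p"}}, which util.merge_dicts() folds into self._manifest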
def __contains__(self, key):
try:
self.get(key)
@@ -684,9 +706,11 @@ class PlatformBoardConfig(object):
"mcu":
self._manifest.get("build", {}).get("mcu", "").upper(),
"fcpu":
int(
re.sub(r"[^\d]+", "",
self._manifest.get("build", {}).get("f_cpu", "0L"))),
int("".join([
c for c in str(
self._manifest.get("build", {}).get("f_cpu", "0L"))
if c.isdigit()
])),
"ram":
self._manifest.get("upload", {}).get("maximum_ram_size", 0),
"rom":
@@ -713,3 +737,33 @@ class PlatformBoardConfig(object):
if key in ("default", "onboard"):
tools[name][key] = value
return {"tools": tools}
def get_debug_tool_name(self, custom=None):
debug_tools = self._manifest.get("debug", {}).get("tools")
tool_name = custom
if tool_name == "custom":
return tool_name
if not debug_tools:
raise exception.DebugSupportError(self._manifest['name'])
if tool_name:
if tool_name in debug_tools:
return tool_name
raise exception.DebugInvalidOptions(
"Unknown debug tool `%s`. Please use one of `%s` or `custom`" %
(tool_name, ", ".join(sorted(debug_tools.keys()))))
# automatically select best tool
data = {"default": [], "onboard": [], "external": []}
for key, value in debug_tools.items():
if value.get("default"):
data['default'].append(key)
elif value.get("onboard"):
data['onboard'].append(key)
data['external'].append(key)
for key, value in data.items():
if not value:
continue
return sorted(value)[0]
assert any(item for item in data)
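Auto-selection prefers tools flagged "default", then "onboard", then any external tool, taking the first name in sorted order; a sketch against a made-up "debug" section of a board manifest:

debug_tools = {  # hypothetical manifest data
    "blackmagic": {},
    "jlink": {"onboard": True},
    "stlink": {"onboard": True},
}
data = {"default": [], "onboard": [], "external": []}
for key, value in debug_tools.items():
    if value.get("default"):
        data["default"].append(key)
    elif value.get("onboard"):
        data["onboard"].append(key)
    data["external"].append(key)
for group in ("default", "onboard", "external"):  # intended priority
    if data[group]:
        print(sorted(data[group])[0])  # -> "jlink"
        break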


@@ -6,4 +6,4 @@
; Advanced options: extra scripting
;
; Please visit documentation for the other options and examples
; http://docs.platformio.org/page/projectconf.html
; https://docs.platformio.org/page/projectconf.html

View File

@@ -16,6 +16,7 @@ import atexit
import platform
import Queue
import re
import sys
import threading
from collections import deque
from os import getenv, sep
@@ -60,7 +61,7 @@ class MeasurementProtocol(TelemetryBase):
}
def __init__(self):
TelemetryBase.__init__(self)
super(MeasurementProtocol, self).__init__()
self['v'] = 1
self['tid'] = self.TID
self['cid'] = app.get_cid()
@@ -111,8 +112,8 @@ class MeasurementProtocol(TelemetryBase):
self['cd2'] = "Python/%s %s" % (platform.python_version(),
platform.platform())
# self['cd3'] = " ".join(_filter_args(sys.argv[1:]))
self['cd4'] = 1 if (not util.is_ci()
and (caller_id or not util.is_container())) else 0
self['cd4'] = 1 if (not util.is_ci() and
(caller_id or not util.is_container())) else 0
if caller_id:
self['cd5'] = caller_id.lower()
@@ -152,16 +153,22 @@ class MeasurementProtocol(TelemetryBase):
cmd_path.append(sub_cmd)
self['screen_name'] = " ".join([p.title() for p in cmd_path])
def send(self, hittype):
@staticmethod
def _ignore_hit():
if not app.get_setting("enable_telemetry"):
return True
if app.get_session_var("caller_id") and \
all(c in sys.argv for c in ("run", "idedata")):
return True
return False
def send(self, hittype):
if self._ignore_hit():
return
self['t'] = hittype
# correct queue time
if "qt" in self._params and isinstance(self['qt'], float):
self['qt'] = int((time() - self['qt']) * 1000)
MPDataPusher().push(self._params)
@@ -284,7 +291,7 @@ def measure_ci():
}
}
for key, value in envmap.iteritems():
for key, value in envmap.items():
if getenv(key, "").lower() != "true":
continue
event.update({"action": key, "label": value['label']})


@@ -13,15 +13,14 @@
# limitations under the License.
from os import chmod
from os.path import join
from os.path import exists, join
from tarfile import open as tarfile_open
from time import mktime
from zipfile import ZipFile
import click
from platformio import app, util
from platformio.exception import UnsupportedArchiveType
from platformio import exception, util
class ArchiveBase(object):
@@ -32,6 +31,9 @@ class ArchiveBase(object):
def get_items(self):
raise NotImplementedError()
def get_item_filename(self, item):
raise NotImplementedError()
def extract_item(self, item, dest_dir):
self._afo.extract(item, dest_dir)
self.after_extract(item, dest_dir)
@@ -46,16 +48,23 @@ class ArchiveBase(object):
class TARArchive(ArchiveBase):
def __init__(self, archpath):
ArchiveBase.__init__(self, tarfile_open(archpath))
super(TARArchive, self).__init__(tarfile_open(archpath))
def get_items(self):
return self._afo.getmembers()
def get_item_filename(self, item):
return item.name
@staticmethod
def islink(item):
return item.islnk() or item.issym()
class ZIPArchive(ArchiveBase):
def __init__(self, archpath):
ArchiveBase.__init__(self, ZipFile(archpath))
super(ZIPArchive, self).__init__(ZipFile(archpath))
@staticmethod
def preserve_permissions(item, dest_dir):
@@ -72,6 +81,12 @@ class ZIPArchive(ArchiveBase):
def get_items(self):
return self._afo.infolist()
def get_item_filename(self, item):
return item.filename
def islink(self, item):
raise NotImplementedError()
def after_extract(self, item, dest_dir):
self.preserve_permissions(item, dest_dir)
self.preserve_mtime(item, dest_dir)
@@ -89,16 +104,16 @@ class FileUnpacker(object):
elif self.archpath.lower().endswith(".zip"):
self._unpacker = ZIPArchive(self.archpath)
if not self._unpacker:
raise UnsupportedArchiveType(self.archpath)
raise exception.UnsupportedArchiveType(self.archpath)
return self
def __exit__(self, *args):
if self._unpacker:
self._unpacker.close()
def unpack(self, dest_dir="."):
def unpack(self, dest_dir=".", with_progress=True):
assert self._unpacker
if app.is_disabled_progressbar():
if not with_progress:
click.echo("Unpacking...")
for item in self._unpacker.get_items():
self._unpacker.extract_item(item, dest_dir)
@@ -107,4 +122,14 @@ class FileUnpacker(object):
with click.progressbar(items, label="Unpacking") as pb:
for item in pb:
self._unpacker.extract_item(item, dest_dir)
# check on disk
for item in self._unpacker.get_items():
filename = self._unpacker.get_item_filename(item)
item_path = join(dest_dir, filename)
try:
if not self._unpacker.islink(item) and not exists(item_path):
raise exception.ExtractArchiveItemError(filename, dest_dir)
except NotImplementedError:
pass
return True
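The extra loop verifies every extracted entry on disk (symlinks excepted, since ZIPArchive.islink() stays unimplemented) and raises ExtractArchiveItemError for anything missing. Typical usage is expected to look roughly like the sketch below; the module path and archive name are assumptions:

from platformio.unpacker import FileUnpacker

with FileUnpacker("downloads/tool-openocd.tar.gz") as fu:  # hypothetical archive
    fu.unpack("packages/tool-openocd", with_progress=False)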


@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import functools
import json
import os
import platform
@@ -22,13 +20,14 @@ import socket
import stat
import subprocess
import sys
import time
from functools import wraps
from glob import glob
from hashlib import sha1
from os.path import (abspath, basename, dirname, expanduser, isdir, isfile,
join, normpath, splitdrive)
from shutil import rmtree
from threading import Thread
from time import sleep, time
import click
import requests
@@ -38,30 +37,40 @@ from platformio import __apiurl__, __version__, exception
# pylint: disable=wrong-import-order, too-many-ancestors
try:
from configparser import ConfigParser
import configparser as ConfigParser
except ImportError:
from ConfigParser import ConfigParser
import ConfigParser as ConfigParser
class ProjectConfig(ConfigParser):
class ProjectConfig(ConfigParser.ConfigParser):
VARTPL_RE = re.compile(r"\$\{([^\.\}]+)\.([^\}]+)\}")
def items(self, section, **_): # pylint: disable=arguments-differ
items = []
for option in ConfigParser.options(self, section):
for option in ConfigParser.ConfigParser.options(self, section):
items.append((option, self.get(section, option)))
return items
def get(self, section, option, **kwargs):
value = ConfigParser.get(self, section, option, **kwargs)
try:
value = ConfigParser.ConfigParser.get(self, section, option,
**kwargs)
except ConfigParser.Error as e:
raise exception.InvalidProjectConf(str(e))
if "${" not in value or "}" not in value:
return value
return self.VARTPL_RE.sub(self._re_sub_handler, value)
def _re_sub_handler(self, match):
section, option = match.group(1), match.group(2)
if section == "env" and not self.has_section(section):
if section in ("env", "sysenv") and not self.has_section(section):
if section == "env":
click.secho(
"Warning! Access to system environment variable via "
"`${{env.{0}}}` is deprecated. Please use "
"`${{sysenv.{0}}}` instead".format(option),
fg="yellow")
return os.getenv(option)
return self.get(section, option)
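The interpolation regex captures a section and an option from ${section.option} templates, and ${env.X} now only warns in favour of ${sysenv.X} before falling back to the environment; a minimal check of the pattern itself:

import re

VARTPL_RE = re.compile(r"\$\{([^\.\}]+)\.([^\}]+)\}")
match = VARTPL_RE.search("upload_port = ${sysenv.PIO_UPLOAD_PORT}")
print(match.groups())  # -> ("sysenv", "PIO_UPLOAD_PORT")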
@@ -69,7 +78,7 @@ class ProjectConfig(ConfigParser):
class AsyncPipe(Thread):
def __init__(self, outcallback=None):
Thread.__init__(self)
super(AsyncPipe, self).__init__()
self.outcallback = outcallback
self._fd_read, self._fd_write = os.pipe()
@@ -91,7 +100,7 @@ class AsyncPipe(Thread):
if self.outcallback:
self.outcallback(line)
else:
print line
print(line)
self._pipe_reader.close()
def close(self):
@@ -113,37 +122,24 @@ class cd(object):
class memoized(object):
'''
Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
'''
def __init__(self, func):
self.func = func
def __init__(self, expire=0):
self.expire = expire / 1000 # milliseconds
self.cache = {}
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args)
if args in self.cache:
return self.cache[args]
value = self.func(*args)
self.cache[args] = value
return value
def __call__(self, func):
def __repr__(self):
'''Return the function's docstring.'''
return self.func.__doc__
@wraps(func)
def wrapper(*args, **kwargs):
key = str(args) + str(kwargs)
if (key not in self.cache
or (self.expire > 0
and self.cache[key][0] < time.time() - self.expire)):
self.cache[key] = (time.time(), func(*args, **kwargs))
return self.cache[key][1]
def __get__(self, obj, objtype):
'''Support instance methods.'''
fn = functools.partial(self.__call__, obj)
fn.reset = self._reset
return fn
wrapper.reset = self._reset
return wrapper
def _reset(self):
self.cache = {}
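memoized is now a parametrised decorator: cache entries expire after the given number of milliseconds and can be dropped via the attached reset(); a usage sketch with an invented function:

from platformio.util import memoized

@memoized(expire=2000)  # cache results for ~2 seconds
def fetch_manifest(url):
    print("downloading %s" % url)  # stands in for an expensive request
    return {"url": url}

fetch_manifest("https://dl.platformio.org/platforms/manifest.json")  # computed
fetch_manifest("https://dl.platformio.org/platforms/manifest.json")  # served from cache
fetch_manifest.reset()  # clear all cached entries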
@@ -155,15 +151,15 @@ class throttle(object):
self.threshhold = threshhold # milliseconds
self.last = 0
def __call__(self, fn):
def __call__(self, func):
@wraps(fn)
@wraps(func)
def wrapper(*args, **kwargs):
diff = int(round((time() - self.last) * 1000))
diff = int(round((time.time() - self.last) * 1000))
if diff < self.threshhold:
sleep((self.threshhold - diff) * 0.001)
self.last = time()
return fn(*args, **kwargs)
time.sleep((self.threshhold - diff) * 0.001)
self.last = time.time()
return func(*args, **kwargs)
return wrapper
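throttle enforces a minimum interval (in milliseconds) between calls by sleeping off the remaining time; for example:

from platformio.util import throttle

@throttle(500)  # at most one call every 500 ms
def poll_serial_port():
    return "polled"

for _ in range(3):
    poll_serial_port()  # the 2nd and 3rd calls are delayed as needed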
@@ -189,8 +185,7 @@ def load_json(file_path):
with open(file_path, "r") as f:
return json.load(f)
except ValueError:
raise exception.PlatformioException(
"Could not load broken JSON: %s" % file_path)
raise exception.InvalidJSONFile(file_path)
def get_systype():
@@ -208,28 +203,34 @@ def pioversion_to_intstr():
def get_project_optional_dir(name, default=None):
data = None
paths = None
var_name = "PLATFORMIO_%s" % name.upper()
if var_name in os.environ:
data = os.getenv(var_name)
paths = os.getenv(var_name)
else:
try:
config = load_project_config()
if (config.has_section("platformio")
and config.has_option("platformio", name)):
data = config.get("platformio", name)
paths = config.get("platformio", name)
except exception.NotPlatformIOProject:
pass
if not data:
if not paths:
return default
items = []
for item in data.split(", "):
for item in paths.split(", "):
if item.startswith("~"):
item = expanduser(item)
items.append(abspath(item))
return ", ".join(items)
paths = ", ".join(items)
while "$PROJECT_HASH" in paths:
paths = paths.replace("$PROJECT_HASH",
sha1(get_project_dir()).hexdigest()[:10])
return paths
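The new $PROJECT_HASH placeholder expands to the first 10 hex digits of the SHA1 of the project directory path, so shared build/cache locations stay unique per project; roughly:

from hashlib import sha1

project_dir = "/home/user/Projects/blink"  # hypothetical project path
suffix = sha1(project_dir.encode()).hexdigest()[:10]  # the core passes the str directly on Python 2
build_dir = "/tmp/pio-builds/$PROJECT_HASH".replace("$PROJECT_HASH", suffix)
print(build_dir)  # e.g. /tmp/pio-builds/<10 hex chars>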
def get_home_dir():
@@ -253,6 +254,11 @@ def get_home_dir():
return home_dir
def get_cache_dir():
return get_project_optional_dir("cache_dir", join(get_home_dir(),
".cache"))
def get_source_dir():
curpath = abspath(__file__)
if not isfile(curpath):
@@ -311,8 +317,8 @@ def get_projectboards_dir():
join(get_project_dir(), "boards"))
def get_projectpioenvs_dir(force=False):
path = get_project_optional_dir("envs_dir",
def get_projectbuild_dir(force=False):
path = get_project_optional_dir("build_dir",
join(get_project_dir(), ".pioenvs"))
try:
if not isdir(path):
@@ -322,7 +328,7 @@ def get_projectpioenvs_dir(force=False):
with open(dontmod_path, "w") as fp:
fp.write("""
[InternetShortcut]
URL=http://docs.platformio.org/page/projectconf.html#envs-dir
URL=https://docs.platformio.org/page/projectconf/section_platformio.html#build-dir
""")
except Exception as e: # pylint: disable=broad-except
if not force:
@@ -330,6 +336,10 @@ URL=http://docs.platformio.org/page/projectconf.html#envs-dir
return path
# compatibility with PIO Core+
get_projectpioenvs_dir = get_projectbuild_dir
def get_projectdata_dir():
return get_project_optional_dir("data_dir", join(get_project_dir(),
"data"))
@@ -342,17 +352,27 @@ def load_project_config(path=None):
raise exception.NotPlatformIOProject(
dirname(path) if path.endswith("platformio.ini") else path)
cp = ProjectConfig()
cp.read(path)
try:
cp.read(path)
except ConfigParser.Error as e:
raise exception.InvalidProjectConf(str(e))
return cp
def parse_conf_multi_values(items):
result = []
if not items:
return []
return [
item.strip() for item in items.split("\n" if "\n" in items else ", ")
if item.strip()
]
return result
inline_comment_re = re.compile(r"\s+;.*$")
for item in items.split("\n" if "\n" in items else ", "):
item = item.strip()
# comment
if not item or item.startswith((";", "#")):
continue
if ";" in item:
item = inline_comment_re.sub("", item).strip()
result.append(item)
return result
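The reworked parser splits on newlines (or ", " for single-line values), drops whole-line ";"/"#" comments and strips inline ";" comments; for example:

from platformio.util import parse_conf_multi_values

raw = "+<src>\n; whole-line comment\n+<lib> ; inline note\n# another comment"
print(parse_conf_multi_values(raw))
# -> ["+<src>", "+<lib>"]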
def change_filemtime(path, mtime):
@@ -396,7 +416,7 @@ def exec_command(*args, **kwargs):
if isinstance(kwargs[s], AsyncPipe):
result[s[3:]] = "\n".join(kwargs[s].get_buffer())
for k, v in result.iteritems():
for k, v in result.items():
if v and isinstance(v, basestring):
result[k].strip()
@@ -427,7 +447,7 @@ def get_serial_ports(filter_hwid=False):
for p, d, h in comports():
if not p:
continue
if platform.system() == "Windows":
if "windows" in get_systype():
try:
d = unicode(d, errors="ignore")
except TypeError:
@@ -439,15 +459,19 @@ def get_serial_ports(filter_hwid=False):
return result
# fix for PySerial
if not result and platform.system() == "Darwin":
if not result and "darwin" in get_systype():
for p in glob("/dev/tty.*"):
result.append({"port": p, "description": "n/a", "hwid": "n/a"})
return result
# Backward compatibility for PIO Core <3.5
get_serialports = get_serial_ports
def get_logical_devices():
items = []
if platform.system() == "Windows":
if "windows" in get_systype():
try:
result = exec_command(
["wmic", "logicaldisk", "get", "name,VolumeName"]).get(
@@ -483,14 +507,6 @@ def get_logical_devices():
return items
### Backward compatibility for PIO Core <3.5
get_serialports = get_serial_ports
get_logicaldisks = lambda: [{
"disk": d['path'],
"name": d['name']
} for d in get_logical_devices()]
def get_mdns_services():
try:
import zeroconf
@@ -541,19 +557,22 @@ def get_mdns_services():
items = []
with mDNSListener() as mdns:
sleep(3)
time.sleep(3)
for service in mdns.get_services():
properties = None
try:
if service.properties:
json.dumps(service.properties)
properties = service.properties
except UnicodeDecodeError:
pass
items.append({
"type":
service.type,
"name":
service.name,
"ip":
".".join([str(ord(c)) for c in service.address]),
"port":
service.port,
"properties":
service.properties
"type": service.type,
"name": service.name,
"ip": ".".join([str(ord(c)) for c in service.address]),
"port": service.port,
"properties": properties
})
return items
@@ -563,7 +582,7 @@ def get_request_defheaders():
return {"User-Agent": "PlatformIO/%s CI/%d %s" % data}
@memoized
@memoized(expire=10000)
def _api_request_session():
return requests.Session()
@@ -604,6 +623,7 @@ def _get_api_result(
verify=verify_ssl)
result = r.json()
r.raise_for_status()
return r.text
except requests.exceptions.HTTPError as e:
if result and "message" in result:
raise exception.APIRequestError(result['message'])
@@ -617,11 +637,10 @@ def _get_api_result(
finally:
if r:
r.close()
return result
return None
def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
internet_on(raise_exception=True)
from platformio.app import ContentCache
total = 0
max_retries = 5
@@ -633,12 +652,16 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
if cache_key:
result = cc.get(cache_key)
if result is not None:
return result
return json.loads(result)
# check internet before and resolve issue with 60 seconds timeout
internet_on(raise_exception=True)
result = _get_api_result(url, params, data)
if cache_valid:
with ContentCache() as cc:
cc.set(cache_key, result, cache_valid)
return result
return json.loads(result)
except (requests.exceptions.ConnectionError,
requests.exceptions.Timeout) as e:
from platformio.maintenance import in_silence
@@ -648,7 +671,7 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
"[API] ConnectionError: {0} (incremented retry: max={1}, "
"total={2})".format(e, max_retries, total),
fg="yellow")
sleep(2 * total)
time.sleep(2 * total)
raise exception.APIRequestError(
"Could not connect to PlatformIO API Service. "
@@ -657,12 +680,12 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
PING_INTERNET_IPS = [
"192.30.253.113", # github.com
"159.122.18.156", # dl.bintray.com
"18.195.111.75", # dl.bintray.com
"193.222.52.25" # dl.platformio.org
]
@memoized
@memoized(expire=5000)
def _internet_on():
timeout = 2
socket.setdefaulttimeout(timeout)
@@ -720,6 +743,95 @@ def pepver_to_semver(pepver):
return re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, 1)
def items_to_list(items):
if not isinstance(items, list):
items = [i.strip() for i in items.split(",")]
return [i.lower() for i in items if i]
def items_in_list(needle, haystack):
needle = items_to_list(needle)
haystack = items_to_list(haystack)
if "*" in needle or "*" in haystack:
return True
return set(needle) & set(haystack)
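items_in_list() lower-cases both sides and treats "*" as match-anything, otherwise returning the (truthy or falsy) set intersection; e.g.:

from platformio.util import items_in_list

print(bool(items_in_list("Arduino, mbed", ["ARDUINO"])))  # -> True
print(bool(items_in_list("espidf", "arduino, mbed")))     # -> False
print(bool(items_in_list("*", ["anything"])))             # -> True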
def parse_date(datestr):
if "T" in datestr and "Z" in datestr:
return time.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
return time.strptime(datestr)
def format_filesize(filesize):
base = 1024
unit = 0
suffix = "B"
filesize = float(filesize)
if filesize < base:
return "%d%s" % (filesize, suffix)
for i, suffix in enumerate("KMGTPEZY"):
unit = base**(i + 2)
if filesize >= unit:
continue
if filesize % (base**(i + 1)):
return "%.2f%sB" % ((base * filesize / unit), suffix)
break
return "%d%sB" % ((base * filesize / unit), suffix)
def merge_dicts(d1, d2, path=None):
if path is None:
path = []
for key in d2:
if (key in d1 and isinstance(d1[key], dict)
and isinstance(d2[key], dict)):
merge_dicts(d1[key], d2[key], path + [str(key)])
else:
d1[key] = d2[key]
return d1
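merge_dicts() merges d2 into d1 in place, recursing only when both sides hold a dict for the same key (this is what PlatformBoardConfig.update() relies on); for instance:

from platformio.util import merge_dicts

base = {"build": {"mcu": "atmega328p", "f_cpu": "16000000L"}, "name": "uno"}
patch = {"build": {"f_cpu": "8000000L"}, "upload": {"speed": 57600}}
print(merge_dicts(base, patch))
# -> {"build": {"mcu": "atmega328p", "f_cpu": "8000000L"},
#     "name": "uno", "upload": {"speed": 57600}}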
def get_file_contents(path):
try:
with open(path) as f:
return f.read()
except UnicodeDecodeError:
with open(path, encoding="latin-1") as f:
return f.read()
def ensure_udev_rules():
def _rules_to_set(rules_path):
return set(l.strip() for l in get_file_contents(rules_path).split("\n")
if l.strip() and not l.startswith("#"))
if "linux" not in get_systype():
return None
installed_rules = [
"/etc/udev/rules.d/99-platformio-udev.rules",
"/lib/udev/rules.d/99-platformio-udev.rules"
]
if not any(isfile(p) for p in installed_rules):
raise exception.MissedUdevRules
origin_path = abspath(
join(get_source_dir(), "..", "scripts", "99-platformio-udev.rules"))
if not isfile(origin_path):
return None
origin_rules = _rules_to_set(origin_path)
for rules_path in installed_rules:
if not isfile(rules_path):
continue
current_rules = _rules_to_set(rules_path)
if not origin_rules <= current_rules:
raise exception.OutdatedUdevRules(rules_path)
return True
def rmtree_(path):
def _onerror(_, name, __):
@@ -728,8 +840,9 @@ def rmtree_(path):
os.remove(name)
except Exception as e: # pylint: disable=broad-except
click.secho(
"Please manually remove file `%s`" % name, fg="red", err=True)
raise e
"%s \nPlease manually remove the file `%s`" % (str(e), name),
fg="red",
err=True)
return rmtree(path, onerror=_onerror)
@@ -746,8 +859,7 @@ except ImportError:
magic_check_bytes = re.compile(b'([*?[])')
def glob_escape(pathname):
"""Escape all special characters.
"""
"""Escape all special characters."""
# Escaping is done by wrapping any of "*?[" between square brackets.
# Metacharacters do not work in the drive part and shouldn't be
# escaped.


@@ -1,163 +0,0 @@
# UDEV Rules for debug adapters/boards supported by OpenOCD
#
# INSTALLATION
#
#
# The latest version of this file may be found at:
# https://github.com/platformio/platformio-core/blob/develop/scripts/98-openocd-udev.rules
#
# This file must be placed at:
# /etc/udev/rules.d/98-openocd-udev.rules (preferred location)
# or
# /lib/udev/rules.d/98-openocd-udev.rules (req'd on some broken systems)
#
# To install, type this command in a terminal:
# sudo cp 98-openocd-udev.rules /etc/udev/rules.d/98-openocd-udev.rules
#
# Restart "udev" management tool:
# sudo service udev restart
# or
# sudo udevadm control --reload-rules
# sudo udevadm trigger
#
# Ubuntu/Debian users may need to add own “username” to the “dialout” group if
# they are not “root”, doing this issuing a
# sudo usermod -a -G dialout $USER
# sudo usermod -a -G plugdev $USER
#
# After this file is installed, physically unplug and reconnect your adapter/board.
ACTION!="add|change", GOTO="openocd_rules_end"
SUBSYSTEM!="usb|tty|hidraw", GOTO="openocd_rules_end"
# Please keep this list sorted by VID:PID
# opendous and estick
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="204f", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Original FT232/FT245 VID:PID
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Original FT2232 VID:PID
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6010", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Original FT4232 VID:PID
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6011", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Original FT232H VID:PID
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6014", MODE="660", GROUP="plugdev", TAG+="uaccess"
# DISTORTEC JTAG-lock-pick Tiny 2
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8220", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TUMPA, TUMPA Lite
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a98", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a99", MODE="660", GROUP="plugdev", TAG+="uaccess"
# XDS100v2
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="a6d0", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Xverve Signalyzer Tool (DT-USB-ST), Signalyzer LITE (DT-USB-SLITE)
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca0", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca1", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TI/Luminary Stellaris Evaluation Board FTDI (several)
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd9", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TI/Luminary Stellaris In-Circuit Debug Interface FTDI (ICDI) Board
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcda", MODE="660", GROUP="plugdev", TAG+="uaccess"
# egnite Turtelizer 2
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bdc8", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Section5 ICEbear
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c140", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c141", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Amontec JTAGkey and JTAGkey-tiny
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="cff8", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TI ICDI
ATTRS{idVendor}=="0451", ATTRS{idProduct}=="c32a", MODE="660", GROUP="plugdev", TAG+="uaccess"
# STLink v1
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="3744", MODE="660", GROUP="plugdev", TAG+="uaccess"
# STLink v2
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="3748", MODE="660", GROUP="plugdev", TAG+="uaccess"
# STLink v2-1
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="374b", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Hilscher NXHX Boards
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="0028", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Hitex STR9-comStick
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="002c", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Hitex STM32-PerformanceStick
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="002d", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Altera USB Blaster
ATTRS{idVendor}=="09fb", ATTRS{idProduct}=="6001", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Amontec JTAGkey-HiSpeed
ATTRS{idVendor}=="0fbb", ATTRS{idProduct}=="1000", MODE="660", GROUP="plugdev", TAG+="uaccess"
# SEGGER J-Link
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0101", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0102", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0103", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0104", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0105", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0107", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0108", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1010", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1011", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1012", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1013", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1014", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1015", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1016", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1017", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1018", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Raisonance RLink
ATTRS{idVendor}=="138e", ATTRS{idProduct}=="9000", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Debug Board for Neo1973
ATTRS{idVendor}=="1457", ATTRS{idProduct}=="5118", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-USB-OCD
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="0003", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-USB-OCD-TINY
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="0004", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-JTAG-EW
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="001e", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-USB-OCD-TINY-H
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="002a", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-USB-OCD-H
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="002b", MODE="660", GROUP="plugdev", TAG+="uaccess"
# USBprog with OpenOCD firmware
ATTRS{idVendor}=="1781", ATTRS{idProduct}=="0c63", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TI/Luminary Stellaris In-Circuit Debug Interface (ICDI) Board
ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Marvell Sheevaplug
ATTRS{idVendor}=="9e88", ATTRS{idProduct}=="9e8f", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Keil Software, Inc. ULink
ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="660", GROUP="plugdev", TAG+="uaccess"
# CMSIS-DAP compatible adapters
ATTRS{product}=="*CMSIS-DAP*", MODE="660", GROUP="plugdev", TAG+="uaccess"
LABEL="openocd_rules_end"


@@ -12,35 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#####################################################################################
#
# INSTALLATION
#
# Please visit > https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules
#
#####################################################################################
# UDEV Rules for PlatformIO supported boards, http://platformio.org/boards
#
# The latest version of this file may be found at:
# https://github.com/platformio/platformio-core/blob/develop/scripts/99-platformio-udev.rules
# Boards
#
# This file must be placed at:
# /etc/udev/rules.d/99-platformio-udev.rules (preferred location)
# or
# /lib/udev/rules.d/99-platformio-udev.rules (req'd on some broken systems)
#
# To install, type this command in a terminal:
# sudo cp 99-platformio-udev.rules /etc/udev/rules.d/99-platformio-udev.rules
#
# Restart "udev" management tool:
# sudo service udev restart
# or
# sudo udevadm control --reload-rules
# sudo udevadm trigger
#
# Ubuntu/Debian users may need to add own “username” to the “dialout” group if
# they are not “root”, doing this issuing a
# sudo usermod -a -G dialout $USER
# sudo usermod -a -G plugdev $USER
#
# After this file is installed, physically unplug and reconnect your board.
# CP210X USB UART
SUBSYSTEMS=="usb", ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60", MODE:="0666"
@@ -71,6 +53,9 @@ KERNEL=="ttyACM*", ATTRS{idVendor}=="16d0", ATTRS{idProduct}=="0753", MODE:="066
# STM32 discovery boards, with onboard st/linkv2
SUBSYSTEMS=="usb", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="374?", MODE:="0666"
# Maple with DFU
SUBSYSTEMS=="usb", ATTRS{idVendor}=="1eaf", ATTRS{idProduct}=="000[34]", MODE:="0666"
# USBtiny
SUBSYSTEMS=="usb", ATTRS{idProduct}=="0c9f", ATTRS{idVendor}=="1781", MODE="0666"
@@ -89,9 +74,186 @@ SUBSYSTEMS=="usb", ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="0666
#TI MSP430 Launchpad
SUBSYSTEMS=="usb", ATTRS{idVendor}=="0451", ATTRS{idProduct}=="f432", MODE="0666"
# CMSIS-DAP compatible adapters
ATTRS{product}=="*CMSIS-DAP*", MODE="664", GROUP="plugdev"
#
# Debuggers
#
# Black Magic Probe
SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic GDB Server"
SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic UART Port"
# opendous and estick
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="204f", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Original FT232/FT245 VID:PID
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Original FT2232 VID:PID
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6010", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Original FT4232 VID:PID
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6011", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Original FT232H VID:PID
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6014", MODE="660", GROUP="plugdev", TAG+="uaccess"
# DISTORTEC JTAG-lock-pick Tiny 2
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8220", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TUMPA, TUMPA Lite
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a98", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a99", MODE="660", GROUP="plugdev", TAG+="uaccess"
# XDS100v2
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="a6d0", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Xverve Signalyzer Tool (DT-USB-ST), Signalyzer LITE (DT-USB-SLITE)
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca0", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca1", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TI/Luminary Stellaris Evaluation Board FTDI (several)
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd9", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TI/Luminary Stellaris In-Circuit Debug Interface FTDI (ICDI) Board
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcda", MODE="660", GROUP="plugdev", TAG+="uaccess"
# egnite Turtelizer 2
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bdc8", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Section5 ICEbear
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c140", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c141", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Amontec JTAGkey and JTAGkey-tiny
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="cff8", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TI ICDI
ATTRS{idVendor}=="0451", ATTRS{idProduct}=="c32a", MODE="660", GROUP="plugdev", TAG+="uaccess"
# STLink v1
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="3744", MODE="660", GROUP="plugdev", TAG+="uaccess"
# STLink v2
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="3748", MODE="660", GROUP="plugdev", TAG+="uaccess"
# STLink v2-1
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="374b", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Hilscher NXHX Boards
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="0028", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Hitex STR9-comStick
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="002c", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Hitex STM32-PerformanceStick
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="002d", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Altera USB Blaster
ATTRS{idVendor}=="09fb", ATTRS{idProduct}=="6001", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Amontec JTAGkey-HiSpeed
ATTRS{idVendor}=="0fbb", ATTRS{idProduct}=="1000", MODE="660", GROUP="plugdev", TAG+="uaccess"
# SEGGER J-Link
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0101", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0102", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0103", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0104", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0105", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0107", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0108", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1010", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1011", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1012", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1013", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1014", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1015", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1016", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1017", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1018", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Raisonance RLink
ATTRS{idVendor}=="138e", ATTRS{idProduct}=="9000", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Debug Board for Neo1973
ATTRS{idVendor}=="1457", ATTRS{idProduct}=="5118", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-USB-OCD
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="0003", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-USB-OCD-TINY
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="0004", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-JTAG-EW
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="001e", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-USB-OCD-TINY-H
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="002a", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Olimex ARM-USB-OCD-H
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="002b", MODE="660", GROUP="plugdev", TAG+="uaccess"
# USBprog with OpenOCD firmware
ATTRS{idVendor}=="1781", ATTRS{idProduct}=="0c63", MODE="660", GROUP="plugdev", TAG+="uaccess"
# TI/Luminary Stellaris In-Circuit Debug Interface (ICDI) Board
ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Marvell Sheevaplug
ATTRS{idVendor}=="9e88", ATTRS{idProduct}=="9e8f", MODE="660", GROUP="plugdev", TAG+="uaccess"
# Keil Software, Inc. ULink
ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="660", GROUP="plugdev", TAG+="uaccess"
# CMSIS-DAP compatible adapters
ATTRS{product}=="*CMSIS-DAP*", MODE="660", GROUP="plugdev", TAG+="uaccess"
# SEGGER J-Link
ATTR{idProduct}=="1001", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1002", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1003", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1004", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1005", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1006", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1007", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1008", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1009", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100a", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100b", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100c", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100d", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100e", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100f", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1010", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1011", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1012", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1013", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1014", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1015", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1016", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1017", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1018", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1019", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101a", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101b", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101c", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101d", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101e", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101f", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1020", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1021", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1022", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1023", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1024", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1025", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1026", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1027", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1028", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1029", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102a", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102b", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102c", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102d", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102e", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102f", ATTR{idVendor}=="1366", MODE="666"

File diff suppressed because it is too large


@@ -0,0 +1,41 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import subprocess
import sys
from platformio import util
def main():
platforms = json.loads(
subprocess.check_output(
["platformio", "platform", "search", "--json-output"]))
for platform in platforms:
if platform['forDesktop']:
continue
# RISC-V GAP does not support Windows x86
if (util.get_systype() == "windows_x86"
and platform['name'] == "riscv_gap"):
continue
# unknown issue on Linux
if ("linux" in util.get_systype()
and platform['name'] == "aceinna_imu"):
continue
subprocess.check_call(
["platformio", "platform", "install", platform['repository']])
if __name__ == "__main__":
sys.exit(main())


@@ -18,11 +18,9 @@ from platformio import (__author__, __description__, __email__, __license__,
__title__, __url__, __version__)
install_requires = [
"arrow>=0.10.0,!=0.11.0",
"bottle<0.13",
"click>=5,<6",
"colorama",
"lockfile>=0.9.1,<0.13",
"pyserial>=3,<4,!=3.3",
"requests>=2.4.0,<3",
"semantic_version>=2.5.0,<3"
@@ -37,8 +35,9 @@ setup(
author_email=__email__,
url=__url__,
license=__license__,
python_requires='>=2.7, <3',
install_requires=install_requires,
packages=find_packages(),
packages=find_packages() + ["scripts"],
package_data={
"platformio": [
"projectconftpl.ini",
@@ -46,6 +45,9 @@ setup(
"ide/tpls/*/*.tpl",
"ide/tpls/*/*/*.tpl",
"ide/tpls/*/.*/*.tpl"
],
"scripts": [
"99-platformio-udev.rules"
]
},
entry_points={


@@ -15,6 +15,7 @@
from os.path import join
from platformio.commands.ci import cli as cmd_ci
from platformio.commands.lib import cli as cmd_lib
def test_ci_empty(clirunner):
@@ -25,27 +26,31 @@ def test_ci_empty(clirunner):
def test_ci_boards(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_ci, [
join("examples", "atmelavr", "arduino-internal-libs", "src",
"ChatServer.ino"), "-b", "uno", "-b", "leonardo"
join("examples", "wiring-blink", "src", "main.cpp"), "-b", "uno", "-b",
"leonardo"
])
validate_cliresult(result)
def test_ci_project_conf(clirunner, validate_cliresult):
project_dir = join("examples", "atmelavr", "arduino-internal-libs")
project_dir = join("examples", "wiring-blink")
result = clirunner.invoke(cmd_ci, [
join(project_dir, "src", "ChatServer.ino"), "--project-conf",
join(project_dir, "src", "main.cpp"), "--project-conf",
join(project_dir, "platformio.ini")
])
validate_cliresult(result)
assert all([s in result.output for s in ("ethernet", "leonardo", "yun")])
assert "uno" in result.output
def test_ci_lib_and_board(clirunner, validate_cliresult):
example_dir = join("examples", "atmelavr", "arduino-external-libs")
def test_ci_lib_and_board(clirunner, tmpdir_factory, validate_cliresult):
storage_dir = str(tmpdir_factory.mktemp("lib"))
result = clirunner.invoke(
cmd_lib, ["--storage-dir", storage_dir, "install", "1@2.3.2"])
validate_cliresult(result)
result = clirunner.invoke(cmd_ci, [
join(example_dir, "lib", "OneWire", "examples", "DS2408_Switch",
"DS2408_Switch.pde"), "-l", join(example_dir, "lib", "OneWire"),
"-b", "uno"
join(storage_dir, "OneWire_ID1", "examples", "DS2408_Switch",
"DS2408_Switch.pde"), "-l",
join(storage_dir, "OneWire_ID1"), "-b", "uno"
])
validate_cliresult(result)


@@ -54,7 +54,7 @@ def test_init_duplicated_boards(clirunner, validate_cliresult, tmpdir):
assert set(config.sections()) == set(["env:uno"])
def test_init_ide_without_board(clirunner, validate_cliresult, tmpdir):
def test_init_ide_without_board(clirunner, tmpdir):
with tmpdir.as_cwd():
result = clirunner.invoke(cmd_init, ["--ide", "atom"])
assert result.exit_code == -1
@@ -67,13 +67,15 @@ def test_init_ide_atom(clirunner, validate_cliresult, tmpdir):
cmd_init, ["--ide", "atom", "-b", "uno", "-b", "teensy31"])
validate_cliresult(result)
validate_pioproject(str(tmpdir))
assert all([tmpdir.join(f).check()
for f in (".clang_complete", ".gcc-flags.json")])
assert all([
tmpdir.join(f).check()
for f in (".clang_complete", ".gcc-flags.json")
])
assert "arduinoavr" in tmpdir.join(".clang_complete").read()
# switch to NodeMCU
result = clirunner.invoke(
cmd_init, ["--ide", "atom", "-b", "nodemcuv2"])
result = clirunner.invoke(cmd_init,
["--ide", "atom", "-b", "nodemcuv2"])
validate_cliresult(result)
validate_pioproject(str(tmpdir))
assert "arduinoespressif" in tmpdir.join(".clang_complete").read()
@@ -104,15 +106,13 @@ def test_init_special_board(clirunner, validate_cliresult):
boards = json.loads(result.output)
config = util.load_project_config()
expected_result = [
("platform", str(boards[0]['platform'])),
("framework", str(boards[0]['frameworks'][0])), ("board", "uno")
]
expected_result = [("platform", str(boards[0]['platform'])),
("framework",
str(boards[0]['frameworks'][0])), ("board", "uno")]
assert config.has_section("env:uno")
assert len(
set(expected_result).symmetric_difference(
set(config.items("env:uno")))) == 0
assert not set(expected_result).symmetric_difference(
set(config.items("env:uno")))
def test_init_enable_auto_uploading(clirunner, validate_cliresult):
@@ -122,14 +122,11 @@ def test_init_enable_auto_uploading(clirunner, validate_cliresult):
validate_cliresult(result)
validate_pioproject(getcwd())
config = util.load_project_config()
expected_result = [
("platform", "atmelavr"), ("framework", "arduino"),
("board", "uno"), ("targets", "upload")
]
expected_result = [("platform", "atmelavr"), ("framework", "arduino"),
("board", "uno"), ("targets", "upload")]
assert config.has_section("env:uno")
assert len(
set(expected_result).symmetric_difference(
set(config.items("env:uno")))) == 0
assert not set(expected_result).symmetric_difference(
set(config.items("env:uno")))
def test_init_custom_framework(clirunner, validate_cliresult):
@@ -139,14 +136,11 @@ def test_init_custom_framework(clirunner, validate_cliresult):
validate_cliresult(result)
validate_pioproject(getcwd())
config = util.load_project_config()
expected_result = [
("platform", "teensy"), ("framework", "mbed"),
("board", "teensy31")
]
expected_result = [("platform", "teensy"), ("framework", "mbed"),
("board", "teensy31")]
assert config.has_section("env:teensy31")
assert len(
set(expected_result).symmetric_difference(
set(config.items("env:teensy31")))) == 0
assert not set(expected_result).symmetric_difference(
set(config.items("env:teensy31")))
def test_init_incorrect_board(clirunner):


@@ -15,8 +15,7 @@
import json
import re
from platformio import exception, util
from platformio.commands.init import cli as cmd_init
from platformio import exception
from platformio.commands.lib import cli as cmd_lib
@@ -36,25 +35,11 @@ def test_search(clirunner, validate_cliresult):
def test_global_install_registry(clirunner, validate_cliresult,
isolated_pio_home):
result = clirunner.invoke(cmd_lib, [
"-g", "install", "58", "547@2.2.4", "DallasTemperature",
"http://dl.platformio.org/libraries/archives/3/5174.tar.gz",
"ArduinoJson@5.6.7", "ArduinoJson@~5.7.0", "168@00589a3250"
"-g", "install", "64", "ArduinoJson@~5.10.0", "547@2.2.4",
"AsyncMqttClient@<=0.8.2", "999@77d4eb3f8a"
])
validate_cliresult(result)
# check lib with duplicate URL
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"http://dl.platformio.org/libraries/archives/3/5174.tar.gz"
])
validate_cliresult(result)
assert "is already installed" in result.output
# check lib with duplicate ID
result = clirunner.invoke(cmd_lib, ["-g", "install", "305"])
validate_cliresult(result)
assert "is already installed" in result.output
# install unknown library
result = clirunner.invoke(cmd_lib, ["-g", "install", "Unknown"])
assert result.exit_code != 0
@@ -62,9 +47,9 @@ def test_global_install_registry(clirunner, validate_cliresult,
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"ArduinoJson_ID64", "ArduinoJson_ID64@5.6.7", "DallasTemperature_ID54",
"DHT22_ID58", "ESPAsyncTCP_ID305", "NeoPixelBus_ID547", "OneWire_ID1",
"EspSoftwareSerial_ID168"
"ArduinoJson_ID64", "ArduinoJson_ID64@5.10.1", "NeoPixelBus_ID547",
"AsyncMqttClient_ID346", "ESPAsyncTCP_ID305", "AsyncTCP_ID1826",
"RFcontrol_ID999"
]
assert set(items1) == set(items2)
@@ -72,11 +57,12 @@ def test_global_install_registry(clirunner, validate_cliresult,
def test_global_install_archive(clirunner, validate_cliresult,
isolated_pio_home):
result = clirunner.invoke(cmd_lib, [
"-g", "install", "https://github.com/adafruit/Adafruit-ST7735-Library/"
"archive/master.zip",
"-g", "install",
"http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2"
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
"SomeLib=http://dl.platformio.org/libraries/archives/0/9540.tar.gz",
"https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip"
])
validate_cliresult(result)
@@ -87,16 +73,11 @@ def test_global_install_archive(clirunner, validate_cliresult,
])
assert result.exit_code != 0
# check lib with duplicate URL
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip"
])
validate_cliresult(result)
assert "is already installed" in result.output
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = ["Adafruit ST7735 Library", "RadioHead-1.62"]
items2 = [
"RadioHead-1.62", "ArduinoJson", "SomeLib_ID54",
"OneWire_ID1", "ESP32WebServer"
]
assert set(items1) >= set(items2)
@@ -113,18 +94,41 @@ def test_global_install_repository(clirunner, validate_cliresult,
"https://gitlab.com/ivankravets/rs485-nodeproto.git",
"https://github.com/platformio/platformio-libmirror.git",
# "https://developer.mbed.org/users/simon/code/TextLCD/",
"knolleary/pubsubclient"
"knolleary/pubsubclient#bef58148582f956dfa772687db80c44e2279a163"
])
validate_cliresult(result)
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"PJON", "PJON@src-79de467ebe19de18287becff0a1fb42d",
"ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81", "rs485-nodeproto",
"PubSubClient"
"platformio-libmirror", "PubSubClient"
]
assert set(items1) >= set(items2)
# check lib with duplicate URL
def test_install_duplicates(clirunner, validate_cliresult, without_internet):
# registry
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"http://dl.platformio.org/libraries/archives/0/9540.tar.gz"
])
validate_cliresult(result)
assert "is already installed" in result.output
# by ID
result = clirunner.invoke(cmd_lib, ["-g", "install", "999"])
validate_cliresult(result)
assert "is already installed" in result.output
# archive
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip"
])
validate_cliresult(result)
assert "is already installed" in result.output
# repository
result = clirunner.invoke(cmd_lib, [
"-g", "install",
"https://github.com/platformio/platformio-libmirror.git"
@@ -136,23 +140,40 @@ def test_global_install_repository(clirunner, validate_cliresult,
def test_global_lib_list(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_lib, ["-g", "list"])
validate_cliresult(result)
assert all([n in result.output for n in ("OneWire", "DHT22", "64")])
assert all([
n in result.output for n in
("Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
"Version: 5.10.1",
"Source: git+https://github.com/gioblu/PJON.git#3.0",
"Version: 1fb26fd", "RadioHead-1.62")
])
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
assert all([
n in result.output
for n in (
"PJON", "git+https://github.com/knolleary/pubsubclient",
"https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip")
n in result.output for n in
("__pkg_dir",
'"__src_url": "git+https://gitlab.com/ivankravets/rs485-nodeproto.git"',
'"version": "5.10.1"')
])
items1 = [i['name'] for i in json.loads(result.output)]
items2 = [
"OneWire", "DHT22", "PJON", "ESPAsyncTCP", "ArduinoJson",
"PubSubClient", "rs485-nodeproto", "Adafruit ST7735 Library",
"RadioHead-1.62", "DallasTemperature", "NeoPixelBus",
"EspSoftwareSerial", "platformio-libmirror"
"ESP32WebServer", "ArduinoJson", "ArduinoJson", "ArduinoJson",
"ArduinoJson", "AsyncMqttClient", "AsyncTCP", "SomeLib",
"ESPAsyncTCP", "NeoPixelBus", "OneWire", "PJON", "PJON",
"PubSubClient", "RFcontrol", "RadioHead-1.62", "platformio-libmirror",
"rs485-nodeproto"
]
assert set(items1) == set(items2)
assert sorted(items1) == sorted(items2)
versions1 = [
"{name}@{version}".format(**item) for item in json.loads(result.output)
]
versions2 = [
'ArduinoJson@5.8.2', 'ArduinoJson@5.10.1', 'AsyncMqttClient@0.8.2',
'NeoPixelBus@2.2.4', 'PJON@07fe9aa', 'PJON@1fb26fd',
'PubSubClient@bef5814', 'RFcontrol@77d4eb3f8a', 'RadioHead-1.62@0.0.0'
]
assert set(versions1) >= set(versions2)
def test_global_lib_update_check(clirunner, validate_cliresult):
@@ -160,7 +181,7 @@ def test_global_lib_update_check(clirunner, validate_cliresult):
cmd_lib, ["-g", "update", "--only-check", "--json-output"])
validate_cliresult(result)
output = json.loads(result.output)
assert set(["ArduinoJson", "EspSoftwareSerial",
assert set(["RFcontrol",
"NeoPixelBus"]) == set([l['name'] for l in output])
@@ -181,11 +202,9 @@ def test_global_lib_update(clirunner, validate_cliresult):
# update rest libraries
result = clirunner.invoke(cmd_lib, ["-g", "update"])
validate_cliresult(result)
validate_cliresult(result)
assert result.output.count("[Fixed]") == 5
assert result.output.count("[Up-to-date]") == 10
assert "Uninstalling ArduinoJson @ 5.7.3" in result.output
assert "Uninstalling EspSoftwareSerial @ 00589a3250" in result.output
assert result.output.count("[Detached]") == 6
assert result.output.count("[Up-to-date]") == 11
assert "Uninstalling RFcontrol @ 77d4eb3f8a" in result.output
# update unknown library
result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"])
@@ -200,23 +219,24 @@ def test_global_lib_uninstall(clirunner, validate_cliresult,
validate_cliresult(result)
items = json.loads(result.output)
result = clirunner.invoke(cmd_lib,
["-g", "uninstall", items[0]['__pkg_dir']])
["-g", "uninstall", items[5]['__pkg_dir']])
validate_cliresult(result)
assert "Uninstalling Adafruit ST7735 Library" in result.output
assert "Uninstalling AsyncTCP" in result.output
# uninstall the rest libraries
result = clirunner.invoke(cmd_lib, [
"-g", "uninstall", "1", "https://github.com/bblanchon/ArduinoJson.git",
"ArduinoJson@!=5.6.7", "EspSoftwareSerial@>=3.3.1"
"ArduinoJson@!=5.6.7", "RFcontrol"
])
validate_cliresult(result)
items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"ArduinoJson_ID64", "ArduinoJson_ID64@5.6.7", "DallasTemperature_ID54",
"DHT22_ID58", "ESPAsyncTCP_ID305", "NeoPixelBus_ID547", "PJON",
"PJON@src-79de467ebe19de18287becff0a1fb42d", "PubSubClient",
"RadioHead-1.62", "rs485-nodeproto", "platformio-libmirror"
"RadioHead-1.62", "rs485-nodeproto", "platformio-libmirror",
"PubSubClient", "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
"ESPAsyncTCP_ID305", "SomeLib_ID54", "NeoPixelBus_ID547",
"PJON", "AsyncMqttClient_ID346", "ArduinoJson_ID64",
"PJON@src-79de467ebe19de18287becff0a1fb42d", "ESP32WebServer"
]
assert set(items1) == set(items2)
@@ -231,7 +251,7 @@ def test_lib_show(clirunner, validate_cliresult):
validate_cliresult(result)
assert all(
[s in result.output for s in ("ArduinoJson", "Arduino", "Atmel AVR")])
result = clirunner.invoke(cmd_lib, ["show", "OneWire"])
result = clirunner.invoke(cmd_lib, ["show", "OneWire", "--json-output"])
validate_cliresult(result)
assert "OneWire" in result.output
@@ -248,7 +268,7 @@ def test_lib_stats(clirunner, validate_cliresult):
validate_cliresult(result)
assert all([
s in result.output
for s in ("UPDATED", "ago", "http://platformio.org/lib/show")
for s in ("UPDATED", "POPULAR", "https://platformio.org/lib/show")
])
result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])


@@ -24,27 +24,25 @@ def test_search_json_output(clirunner, validate_cliresult, isolated_pio_home):
validate_cliresult(result)
search_result = json.loads(result.output)
assert isinstance(search_result, list)
assert len(search_result)
assert search_result
platforms = [item['name'] for item in search_result]
assert "atmelsam" in platforms
def test_search_raw_output(clirunner, validate_cliresult, isolated_pio_home):
def test_search_raw_output(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_search, ["arduino"])
validate_cliresult(result)
assert "teensy" in result.output
def test_install_unknown_version(clirunner, validate_cliresult,
isolated_pio_home):
def test_install_unknown_version(clirunner):
result = clirunner.invoke(cli_platform.platform_install,
["atmelavr@99.99.99"])
assert result.exit_code == -1
assert isinstance(result.exception, exception.UndefinedPackageVersion)
def test_install_unknown_from_registry(clirunner, validate_cliresult,
isolated_pio_home):
def test_install_unknown_from_registry(clirunner):
result = clirunner.invoke(cli_platform.platform_install,
["unknown-platform"])
assert result.exit_code == -1
@@ -70,19 +68,20 @@ def test_install_from_vcs(clirunner, validate_cliresult, isolated_pio_home):
])
validate_cliresult(result)
assert "espressif8266" in result.output
assert len(isolated_pio_home.join("packages").listdir()) == 1
def test_list_json_output(clirunner, validate_cliresult, isolated_pio_home):
def test_list_json_output(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_list, ["--json-output"])
validate_cliresult(result)
list_result = json.loads(result.output)
assert isinstance(list_result, list)
assert len(list_result)
assert list_result
platforms = [item['name'] for item in list_result]
assert set(["atmelavr", "espressif8266"]) == set(platforms)
def test_list_raw_output(clirunner, validate_cliresult, isolated_pio_home):
def test_list_raw_output(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_list)
validate_cliresult(result)
assert all(
@@ -111,4 +110,4 @@ def test_uninstall(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_uninstall,
["atmelavr", "espressif8266"])
validate_cliresult(result)
assert len(isolated_pio_home.join("platforms").listdir()) == 0
assert not isolated_pio_home.join("platforms").listdir()


@@ -19,6 +19,6 @@ from platformio.commands.settings import cli
def test_settings_check(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["get"])
validate_cliresult(result)
assert len(result.output)
assert result.output
for item in app.DEFAULT_SETTINGS.items():
assert item[0] in result.output


@@ -16,11 +16,7 @@ from platformio.commands.update import cli as cmd_update
def test_update(clirunner, validate_cliresult):
matches = (
"Platform Manager",
"Up-to-date",
"Library Manager"
)
matches = ("Platform Manager", "Up-to-date", "Library Manager")
result = clirunner.invoke(cmd_update, ["--only-check"])
validate_cliresult(result)
assert all([m in result.output for m in matches])


@@ -17,22 +17,24 @@ import os
import pytest
from click.testing import CliRunner
@pytest.fixture(scope="module")
def clirunner():
return CliRunner()
from platformio import util
@pytest.fixture(scope="session")
def validate_cliresult():
def decorator(result):
assert result.exit_code == 0
assert not result.exception
assert result.exit_code == 0, result.output
assert not result.exception, result.output
return decorator
@pytest.fixture(scope="module")
def clirunner():
return CliRunner()
@pytest.fixture(scope="module")
def isolated_pio_home(request, tmpdir_factory):
home_dir = tmpdir_factory.mktemp(".platformio")
@@ -43,3 +45,8 @@ def isolated_pio_home(request, tmpdir_factory):
request.addfinalizer(fin)
return home_dir
@pytest.fixture(scope="function")
def without_internet(monkeypatch):
monkeypatch.setattr(util, "_internet_on", lambda: False)
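For reference, a minimal sketch of how a test opts into the new fixture; it mirrors test_api_internet_offline shown further down in this compare, and the test name here is only illustrative:

import pytest

from platformio import exception, util


def test_anything_that_needs_offline(without_internet, isolated_pio_home):
    # The fixture monkeypatches util._internet_on to return False,
    # so connectivity-dependent helpers should raise InternetIsOffline.
    with pytest.raises(exception.InternetIsOffline):
        util.get_api_result("/stats")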


@@ -48,3 +48,5 @@ void myFunction(struct Item *item) {
void fooCallback(){
}
// юнікод

tests/test_builder.py Normal file

@@ -0,0 +1,103 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.commands.run import cli as cmd_run


def test_build_flags(clirunner, validate_cliresult, tmpdir):
    build_flags = [("-D TEST_INT=13", "-DTEST_INT=13"),
                   ("-DTEST_SINGLE_MACRO", "-DTEST_SINGLE_MACRO"),
                   ('-DTEST_STR_SPACE="Andrew Smith"',
                    '"-DTEST_STR_SPACE=Andrew Smith"')]

    tmpdir.join("platformio.ini").write("""
[env:native]
platform = native
extra_scripts = extra.py
build_flags =
    ; -DCOMMENTED_MACRO
    %s ; inline comment
""" % " ".join([f[0] for f in build_flags]))

    tmpdir.join("extra.py").write("""
Import("projenv")
projenv.Append(CPPDEFINES="POST_SCRIPT_MACRO")
""")

    tmpdir.mkdir("src").join("main.cpp").write("""
#if !defined(TEST_INT) || TEST_INT != 13
#error "TEST_INT"
#endif

#ifndef TEST_STR_SPACE
#error "TEST_STR_SPACE"
#endif

#ifndef POST_SCRIPT_MACRO
#error "POST_SCRIPT_MACRO"
#endif

#ifdef COMMENTED_MACRO
#error "COMMENTED_MACRO"
#endif

int main() {
}
""")

    result = clirunner.invoke(
        cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
    validate_cliresult(result)
    build_output = result.output[result.output.find(
        "Scanning dependencies..."):]
    for flag in build_flags:
        assert flag[1] in build_output, flag


def test_build_unflags(clirunner, validate_cliresult, tmpdir):
    tmpdir.join("platformio.ini").write("""
[env:native]
platform = native
build_unflags = -DTMP_MACRO1=45 -I. -DNON_EXISTING_MACRO -lunknownLib -Os
extra_scripts = pre:extra.py
""")

    tmpdir.join("extra.py").write("""
Import("env")
env.Append(CPPPATH="%s")
env.Append(CPPDEFINES="TMP_MACRO1")
env.Append(CPPDEFINES=["TMP_MACRO2"])
env.Append(CPPDEFINES=("TMP_MACRO3", 13))
env.Append(CCFLAGS=["-Os"])
env.Append(LIBS=["unknownLib"])
""" % str(tmpdir))

    tmpdir.mkdir("src").join("main.c").write("""
#ifdef TMP_MACRO1
#error "TMP_MACRO1 should be removed"
#endif

int main() {
}
""")

    result = clirunner.invoke(
        cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
    validate_cliresult(result)
    build_output = result.output[result.output.find(
        "Scanning dependencies..."):]
    assert "-DTMP_MACRO1" not in build_output
    assert "-Os" not in build_output
    assert str(tmpdir) not in build_output


@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from glob import glob
from os import listdir, walk
from os.path import dirname, getsize, isdir, isfile, join, normpath
@@ -19,43 +20,75 @@ from os.path import dirname, getsize, isdir, isfile, join, normpath
import pytest
from platformio import util
from platformio.managers.platform import PlatformFactory, PlatformManager
def pytest_generate_tests(metafunc):
if "pioproject_dir" not in metafunc.fixturenames:
return
example_dirs = normpath(join(dirname(__file__), "..", "examples"))
project_dirs = []
for root, _, files in walk(example_dirs):
if "platformio.ini" not in files or ".skiptest" in files:
examples_dirs = []
# repo examples
examples_dirs.append(normpath(join(dirname(__file__), "..", "examples")))
# dev/platforms
for manifest in PlatformManager().get_installed():
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
if not p.is_embedded():
continue
project_dirs.append(root)
# issue with "version `CXXABI_1.3.9' not found (required by sdcc)"
if "linux" in util.get_systype() and p.name in ("intel_mcs51",
"ststm8"):
continue
examples_dir = join(p.get_dir(), "examples")
assert isdir(examples_dir)
examples_dirs.append(examples_dir)
project_dirs = []
for examples_dir in examples_dirs:
platform_examples = []
for root, _, files in walk(examples_dir):
if "platformio.ini" not in files or ".skiptest" in files:
continue
platform_examples.append(root)
# test random 3 examples
random.shuffle(platform_examples)
project_dirs.extend(platform_examples[:3])
project_dirs.sort()
metafunc.parametrize("pioproject_dir", project_dirs)
@pytest.mark.examples
def test_run(pioproject_dir):
if isdir(join(pioproject_dir, ".pioenvs")):
util.rmtree_(join(pioproject_dir, ".pioenvs"))
with util.cd(pioproject_dir):
build_dir = util.get_projectbuild_dir()
if isdir(build_dir):
util.rmtree_(build_dir)
result = util.exec_command(
["platformio", "--force", "run", "--project-dir", pioproject_dir]
)
if result['returncode'] != 0:
pytest.fail(result)
env_names = []
for section in util.load_project_config().sections():
if section.startswith("env:"):
env_names.append(section[4:])
# check .elf file
pioenvs_dir = join(pioproject_dir, ".pioenvs")
for item in listdir(pioenvs_dir):
if not isdir(item):
continue
assert isfile(join(pioenvs_dir, item, "firmware.elf"))
# check .hex or .bin files
firmwares = []
for ext in ("bin", "hex"):
firmwares += glob(join(pioenvs_dir, item, "firmware*.%s" % ext))
if not firmwares:
pytest.fail("Missed firmware file")
for firmware in firmwares:
assert getsize(firmware) > 0
result = util.exec_command(
["platformio", "run", "-e",
random.choice(env_names)])
if result['returncode'] != 0:
pytest.fail(result)
assert isdir(build_dir)
# check .elf file
for item in listdir(build_dir):
if not isdir(item):
continue
assert isfile(join(build_dir, item, "firmware.elf"))
# check .hex or .bin files
firmwares = []
for ext in ("bin", "hex"):
firmwares += glob(join(build_dir, item, "firmware*.%s" % ext))
if not firmwares:
pytest.fail("Missed firmware file")
for firmware in firmwares:
assert getsize(firmware) > 0


@@ -44,9 +44,7 @@ def test_after_upgrade_2_to_3(clirunner, validate_cliresult,
result = clirunner.invoke(cli_pio, ["settings", "get"])
validate_cliresult(result)
assert "upgraded to 3"
assert isolated_pio_home.join("platforms", "native",
"platform.json").check()
assert "upgraded to 3" in result.output
# check PlatformIO 3.0 boards
assert board_ids == set([p.basename[:-5] for p in boards.listdir()])
@@ -57,8 +55,7 @@ def test_after_upgrade_2_to_3(clirunner, validate_cliresult,
assert board_ids == set([b['id'] for b in json.loads(result.output)])
def test_after_upgrade_silence(clirunner, validate_cliresult,
isolated_pio_home):
def test_after_upgrade_silence(clirunner, validate_cliresult):
app.set_state_item("last_version", "2.11.2")
result = clirunner.invoke(cli_pio, ["boards", "--json-output"])
validate_cliresult(result)
@@ -66,7 +63,7 @@ def test_after_upgrade_silence(clirunner, validate_cliresult,
assert any([b['id'] == "uno" for b in boards])
def test_check_pio_upgrade(clirunner, validate_cliresult, isolated_pio_home):
def test_check_pio_upgrade(clirunner, validate_cliresult):
def _patch_pio_version(version):
maintenance.__version__ = version
@@ -96,7 +93,7 @@ def test_check_pio_upgrade(clirunner, validate_cliresult, isolated_pio_home):
_patch_pio_version(origin_version)
def test_check_lib_updates(clirunner, validate_cliresult, isolated_pio_home):
def test_check_lib_updates(clirunner, validate_cliresult):
# install obsolete library
result = clirunner.invoke(cli_pio,
["lib", "-g", "install", "ArduinoJson@<5.7"])
@@ -113,8 +110,7 @@ def test_check_lib_updates(clirunner, validate_cliresult, isolated_pio_home):
result.output)
def test_check_and_update_libraries(clirunner, validate_cliresult,
isolated_pio_home):
def test_check_and_update_libraries(clirunner, validate_cliresult):
# enable library auto-updates
result = clirunner.invoke(
cli_pio, ["settings", "set", "auto_update_libraries", "Yes"])
@@ -168,8 +164,7 @@ def test_check_platform_updates(clirunner, validate_cliresult,
assert "There are the new updates for platforms (native)" in result.output
def test_check_and_update_platforms(clirunner, validate_cliresult,
isolated_pio_home):
def test_check_and_update_platforms(clirunner, validate_cliresult):
# enable library auto-updates
result = clirunner.invoke(
cli_pio, ["settings", "set", "auto_update_platforms", "Yes"])
@@ -190,8 +185,7 @@ def test_check_and_update_platforms(clirunner, validate_cliresult,
validate_cliresult(result)
assert "There are the new updates for platforms (native)" in result.output
assert "Please wait while updating platforms" in result.output
assert re.search(r"Updating native\s+@ 0.0.0\s+\[[\d\.]+\]",
result.output)
assert re.search(r"Updating native\s+@ 0.0.0\s+\[[\d\.]+\]", result.output)
# check updated version
result = clirunner.invoke(cli_pio, ["platform", "list", "--json-output"])


@@ -82,6 +82,11 @@ def test_pkg_input_parser():
("package", None,
"hg+https://developer.mbed.org/users/user/code/package/")
],
[
"https://os.mbed.com/users/user/code/package/",
("package", None,
"hg+https://os.mbed.com/users/user/code/package/")
],
[
"https://github.com/user/package#v1.2.3",
("package", None, "git+https://github.com/user/package#v1.2.3")
@@ -186,7 +191,7 @@ def test_install_packages(isolated_pio_home, tmpdir):
"packages").listdir()]) == set(pkg_dirnames)
def test_get_package(isolated_pio_home):
def test_get_package():
tests = [
[("unknown", ), None],
[("1", ), None],


@@ -12,11 +12,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import requests
from platformio import util
from platformio import exception, util
def test_ping_internet_ips():
for ip in util.PING_INTERNET_IPS:
requests.get("http://%s" % ip, allow_redirects=False, timeout=2)
def test_api_internet_offline(without_internet, isolated_pio_home):
with pytest.raises(exception.InternetIsOffline):
util.get_api_result("/stats")
def test_api_cache(monkeypatch, isolated_pio_home):
api_kwargs = {"url": "/stats", "cache_valid": "10s"}
result = util.get_api_result(**api_kwargs)
assert result and "boards" in result
monkeypatch.setattr(util, '_internet_on', lambda: False)
assert util.get_api_result(**api_kwargs) == result


@@ -16,11 +16,11 @@ import pytest
import requests
def validate_response(req):
assert req.status_code == 200
assert int(req.headers['Content-Length']) > 0
assert req.headers['Content-Type'] in ("application/gzip",
"application/octet-stream")
def validate_response(r):
assert r.status_code == 200, r.url
assert int(r.headers['Content-Length']) > 0, r.url
assert r.headers['Content-Type'] in ("application/gzip",
"application/octet-stream")
def test_packages():


@@ -23,6 +23,7 @@ deps =
yapf
pylint
pytest
pytest-xdist
commands = python --version
[testenv:docs]
@@ -57,6 +58,9 @@ deps =
pytest
commands =
{envpython} --version
{envpython} -c "print 'travis_fold:start:install_devplatforms'"
{envpython} scripts/install_devplatforms.py
{envpython} -c "print 'travis_fold:end:install_devplatforms'"
py.test -v --basetemp="{envtmpdir}" tests
[testenv:skipexamples]