Mirror of https://github.com/platformio/platformio-core.git (synced 2025-12-23 07:12:31 +01:00)
Compare commits
859 Commits
(Commit listing not reproduced: in this export every row of the table carried only a bare SHA1, from cef778731e in the first row to 6a49df7dfe in the last, with empty Author and Date cells.)
@@ -1,13 +1,20 @@
build: off
environment:

platform:
- x64

environment:
matrix:
- TOXENV: "py27"
PLATFORMIO_BUILD_CACHE_DIR: C:/Temp/PIO_Build_Cache_P2_{build}

- TOXENV: "py36"
PLATFORMIO_BUILD_CACHE_DIR: C:/Temp/PIO_Build_Cache_P3_{build}

install:
- cmd: git submodule update --init --recursive
- cmd: SET PATH=%PATH%;C:\Python27\Scripts;C:\MinGW\bin
- cmd: pip install tox
- cmd: SET PATH=C:\MinGW\bin;%PATH%
- cmd: pip install --force-reinstall tox

test_script:
- cmd: tox
1 .github/FUNDING.yml (vendored, normal file)
@@ -0,0 +1 @@
custom: https://platformio.org/donate
28 .github/ISSUE_TEMPLATE.md (vendored)
@@ -1,22 +1,28 @@
What kind of issue is this?

- [ ] Question. This issue tracker is not the place for questions. If you want to ask how to do
something, or to understand why something isn't working the way you expect it to, use
our Community Forums https://community.platformio.org
- [ ] **Question**.
This issue tracker is not the place for questions. If you want to ask how to do something,
or to understand why something isn't working the way you expect it to,
use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)

- [ ] PlatformIO IDE. All issues related to PlatformIO IDE should be reported to appropriate repository
https://github.com/platformio/platformio-atom-ide/issues
- [ ] **PlatformIO IDE**.
All issues related to PlatformIO IDE should be reported to appropriate repository:
[PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)

- [ ] Development Platform or Board. All issues related to Development Platforms or Embedded Boards
should be reported to appropriate repository.
See full list with repositories and search for "platform-xxx" repository related to your hardware
https://github.com/platformio?query=platform-
- [ ] **Development Platform or Board**.
All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
should be reported to appropriate repository related to your hardware
https://github.com/topics/platformio-platform

- [ ] Feature Request. Start by telling us what problem you’re trying to solve. Often a solution
- [ ] **Feature Request**.
Start by telling us what problem you’re trying to solve. Often a solution
already exists! Don’t send pull requests to implement new features without first getting our
support. Sometimes we leave features out on purpose to keep the project small.

- [ ] PlatformIO Core. If you’ve found a bug, please provide an information below.
- [ ] **PlatformIO Core**.
If you’ve found a bug, please provide an information below.


*You can erase any parts of this template not applicable to your Issue.*
1 .gitignore (vendored)
@@ -9,3 +9,4 @@ build
coverage.xml
.coverage
htmlcov
.pytest_cache
@@ -1,3 +1,3 @@
[settings]
line_length=79
known_third_party=bottle,click,lockfile,python-dateutil,pytest,requests,SCons,semantic_version,serial
known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial,twisted,autobahn,jsonrpc,tabulate
33 .pylintrc
@@ -1,23 +1,12 @@
[MESSAGES CONTROL]

# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time. See also the "--disable" option for examples.
#enable=

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
# disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating

disable=locally-disabled,missing-docstring,invalid-name,too-few-public-methods,redefined-variable-type,import-error,similarities,unsupported-membership-test,unsubscriptable-object,ungrouped-imports,cyclic-import
disable=
missing-docstring,
ungrouped-imports,
invalid-name,
cyclic-import,
duplicate-code,
superfluous-parens,
too-few-public-methods,
useless-object-inheritance,
useless-import-alias,
fixme
18 .travis.yml
@@ -6,14 +6,14 @@ matrix:
sudo: false
python: 2.7
env: TOX_ENV=docs
- os: linux
sudo: false
python: 2.7
env: TOX_ENV=lint
- os: linux
sudo: required
python: 2.7
env: TOX_ENV=py27
env: TOX_ENV=py27 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
- os: linux
sudo: required
python: 3.6
env: TOX_ENV=py36 PLATFORMIO_BUILD_CACHE_DIR=$(mktemp -d)
- os: osx
language: generic
env: TOX_ENV=skipexamples
@@ -21,18 +21,14 @@ matrix:
install:
- git submodule update --init --recursive
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install tox; else pip install -U tox; fi
- pip install -U tox

# ChipKIT issue: install 32-bit support for GCC PIC32
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi

script:
- tox -e $TOX_ENV

after_success:
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then tox -e coverage; fi
- if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then bash <(curl -s https://codecov.io/bash); fi

notifications:
email: false
15 .vscode/settings.json (vendored)
@@ -1,15 +0,0 @@
{
"python.pythonPath": "${workspaceRoot}/.tox/develop/bin/python",
"python.formatting.provider": "yapf",
"files.exclude": {
"**/*.pyc": true,
"*.egg-info": true,
".cache": true,
"build": true,
"dist": true
},
"editor.rulers": [79],
"restructuredtext.builtDocumentationPath": "${workspaceRoot}/docs/_build/html",
"restructuredtext.confPath": "${workspaceRoot}/docs",
"restructuredtext.linter.executablePath": "${workspaceRoot}/.tox/docs/bin/restructuredtext-lint"
}
@@ -4,18 +4,18 @@ Contributing
To get started, <a href="https://www.clahub.com/agreements/platformio/platformio-core">sign the Contributor License Agreement</a>.

1. Fork the repository on GitHub.
2. Make a branch off of ``develop``
3. Run ``pip install tox``
4. Go to the root of project where is located ``tox.ini`` and run ``tox -e develop``
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of project where is located `tox.ini` and run `tox -e py27`
5. Activate current development environment:

* Windows: ``.tox\develop\Scripts\activate``
* Bash/ZSH: ``source .tox/develop/bin/activate``
* Fish: ``source .tox/bin/activate.fish``
* Windows: `.tox\py27\Scripts\activate`
* Bash/ZSH: `source .tox/py27/bin/activate`
* Fish: `source .tox/py27/bin/activate.fish`

6. Make changes to code, documentation, etc.
7. Lint source code ``tox -e lint``
8. Run the tests ``tox -e py27``
9. Build documentation ``tox -e docs`` (creates a directory _build under docs where you can find the html)
7. Lint source code `make lint`
8. Run the tests `make test`
9. Build documentation `tox -e docs` (creates a directory _build under docs where you can find the html)
10. Commit changes to your forked repository
11. Submit a Pull Request on GitHub.
750 HISTORY.rst (file diff suppressed because it is too large)
13 Makefile
@@ -1,4 +1,3 @@

lint:
pylint --rcfile=./.pylintrc ./platformio

@@ -10,7 +9,7 @@ yapf:
yapf --recursive --in-place platformio/

test:
py.test -v -s -n 3 --dist=loadscope tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py
py.test --verbose --capture=no --exitfirst -n 3 --dist=loadscope tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py

before-commit: isort yapf lint test

@@ -23,4 +22,12 @@ clean: clean-docs
rm -rf .cache
rm -rf build
rm -rf htmlcov
rm -f .coverage
rm -f .coverage

profile:
# Usage $ > make PIOARGS="boards" profile
python -m cProfile -o .tox/.tmp/cprofile.prof $(shell which platformio) ${PIOARGS}
snakeviz .tox/.tmp/cprofile.prof

publish:
python setup.py sdist upload
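The new profile target above drives the CLI through cProfile and then opens the stats in snakeviz. Roughly the same measurement can be taken without make; the sketch below assumes the entry point added later in this diff is importable as platformio.__main__.main, and the output path and the "boards" argument are arbitrary examples.

import cProfile
import pstats

from platformio.__main__ import main  # assumed import path for the CLI entry point

profiler = cProfile.Profile()
profiler.enable()
main(["platformio", "boards"])       # one CLI invocation under the profiler
profiler.disable()
profiler.dump_stats("cprofile.prof")

# Instead of snakeviz, print the 20 most expensive calls to the console.
pstats.Stats("cprofile.prof").sort_stats("cumulative").print_stats(20)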
73 README.rst
@@ -7,31 +7,25 @@ PlatformIO
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
:alt: AppVeyor.CI Build Status
.. image:: https://requires.io/github/platformio/platformio-core/requirements.svg?branch=develop
:target: https://requires.io/github/platformio/platformio-core/requirements/?branch=develop
:alt: Requirements Status
.. image:: https://img.shields.io/pypi/v/platformio.svg
:target: https://pypi.python.org/pypi/platformio/
:alt: Latest Version
.. image:: https://img.shields.io/pypi/l/platformio.svg
.. image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg
:target: https://pypi.python.org/pypi/platformio/
:alt: License
.. image:: https://img.shields.io/PlatformIO/Community.png
.. image:: https://img.shields.io/badge/PlatformIO-Community-orange.svg
:alt: Community Forums
:target: https://community.platformio.org?utm_source=github&utm_medium=core
.. image:: https://img.shields.io/PlatformIO/Plus.png?color=orange
:alt: PlatformIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
:target: https://platformio.org/pricing?utm_source=github&utm_medium=core

**Quick Links:** `Home Page <https://platformio.org?utm_source=github&utm_medium=core>`_ |
`PlatformIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
`Project Examples <https://github.com/platformio/platformio-examples/>`_ |
`Docs <http://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_

**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
`Facebook <https://www.facebook.com/platformio>`_ |
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
`Bintray <https://bintray.com/platformio>`_ |
@@ -47,18 +41,27 @@ firmware updates.
Get Started
-----------

* `What is PlatformIO? <http://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_

Products
--------
Open Source
-----------

* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <http://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
* `PIO Remote™ <http://docs.platformio.org/en/latest/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
* `PIO Unified Debugger <http://docs.platformio.org/en/latest/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `PIO Unit Testing <http://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `PIO Delivery™ <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-pio-delivery>`_
* `Cloud Builder <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-cloud-builder>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_

PIO Plus
--------

* `PIO Remote <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
* `PIO Unified Debugger <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `PIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `Cloud IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#solution-pio-delivery>`_
* `Integration Services <https://platformio.org/pricing?utm_source=github&utm_medium=core#enterprise-features>`_

Registry
--------
@@ -68,31 +71,31 @@ Registry
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_

Solutions
---------

* `Library Manager <http://docs.platformio.org/en/latest/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Cloud IDEs Integration <https://platformio.org/pricing?utm_source=github&utm_medium=core#solution-cloud-ide>`_
* `Standalone IDEs Integration <http://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#other-ide>`_
* `Continuous Integration <http://docs.platformio.org/en/latest/ci/index.html?utm_source=github&utm_medium=core>`_

Development Platforms
---------------------

* `Aceinna IMU <https://platformio.org/platforms/aceinna_imu?utm_source=github&utm_medium=core>`_
* `Atmel AVR <https://platformio.org/platforms/atmelavr?utm_source=github&utm_medium=core>`_
* `Atmel SAM <https://platformio.org/platforms/atmelsam?utm_source=github&utm_medium=core>`_
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
* `Intel MCS-51 (8051) <https://platformio.org/platforms/intel_mcs51?utm_source=github&utm_medium=core>`_
* `Kendryte K210 <https://platformio.org/platforms/kendryte210?utm_source=github&utm_medium=core>`_
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
* `RISC-V GAP <https://platformio.org/platforms/riscv_gap?utm_source=github&utm_medium=core>`_
* `Samsung ARTIK <https://platformio.org/platforms/samsung_artik?utm_source=github&utm_medium=core>`_
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
* `ST STM8 <https://platformio.org/platforms/ststm8?utm_source=github&utm_medium=core>`_
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
* `TI MSP430 <https://platformio.org/platforms/timsp430?utm_source=github&utm_medium=core>`_
* `TI Tiva <https://platformio.org/platforms/titiva?utm_source=github&utm_medium=core>`_
@@ -108,8 +111,11 @@ Frameworks
* `ESP-IDF <https://platformio.org/frameworks/espidf?utm_source=github&utm_medium=core>`_
* `ESP8266 Non-OS SDK <https://platformio.org/frameworks/esp8266-nonos-sdk?utm_source=github&utm_medium=core>`_
* `ESP8266 RTOS SDK <https://platformio.org/frameworks/esp8266-rtos-sdk?utm_source=github&utm_medium=core>`_
* `Freedom E SDK <https://platformio.org/frameworks/freedom-e-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte Standalone SDK <https://platformio.org/frameworks/kendryte-standalone-sdk?utm_source=github&utm_medium=core>`_
* `libOpenCM3 <https://platformio.org/frameworks/libopencm3?utm_source=github&utm_medium=core>`_
* `mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
* `PULP OS <https://platformio.org/frameworks/pulp-os?utm_source=github&utm_medium=core>`_
* `Pumbaa <https://platformio.org/frameworks/pumbaa?utm_source=github&utm_medium=core>`_
* `Simba <https://platformio.org/frameworks/simba?utm_source=github&utm_medium=core>`_
* `SPL <https://platformio.org/frameworks/spl?utm_source=github&utm_medium=core>`_
@@ -122,6 +128,15 @@ Contributing

See `contributing guidelines <https://github.com/platformio/platformio/blob/develop/CONTRIBUTING.md>`_.

Telemetry / Privacy Policy
--------------------------

Share minimal diagnostics and usage information to help us make PlatformIO better.
It is enabled by default. For more information see:

* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
* `SSL Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#strict-ssl>`_

License
-------
2 docs
Submodule docs updated: e9e78d043e...29f80d45f2
2 examples
Submodule examples updated: db8b4f3c77...a71564ab46
@@ -12,9 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

VERSION = (3, 5, 2)
VERSION = (4, 0, 1)
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
@@ -26,17 +24,10 @@ __description__ = (
"FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
__url__ = "https://platformio.org"

__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"

__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"

__apiurl__ = "https://api.platformio.org"

if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7, please upgrade Python.\n"
"Python 3 is not yet supported.\n")
sys.stderr.write(msg % (__version__, sys.version))
sys.exit(1)
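The hunk above bumps the version to 4.0.1 and drops the guard that refused to run under anything but Python 2.7. For comparison, a guard widened to accept Python 3 could look like the sketch below; the exact 3.x lower bound is an assumption, not something this diff states.

import sys

# Assumed policy: accept CPython 2.7 or 3.5+; adjust the bounds as needed.
if sys.version_info < (2, 7) or (3, 0) <= sys.version_info < (3, 5):
    sys.stderr.write(
        "PlatformIO Core does not run under Python %s; "
        "version 2.7 or >= 3.5 is required.\n" % sys.version.split()[0])
    sys.exit(1)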
@@ -14,60 +14,22 @@

import os
import sys
from os.path import join
from platform import system
from traceback import format_exc

import click

from platformio import __version__, exception, maintenance
from platformio.util import get_source_dir
from platformio import __version__, exception, maintenance, util
from platformio.commands import PlatformioCLI
from platformio.compat import CYGWIN


class PlatformioCLI(click.MultiCommand): # pylint: disable=R0904

def list_commands(self, ctx):
cmds = []
for filename in os.listdir(join(get_source_dir(), "commands")):
if filename.startswith("__init__"):
continue
if filename.endswith(".py"):
cmds.append(filename[:-3])
cmds.sort()
return cmds

def get_command(self, ctx, cmd_name):
mod = None
try:
mod = __import__("platformio.commands." + cmd_name, None, None,
["cli"])
except ImportError:
try:
return self._handle_obsolate_command(cmd_name)
except AttributeError:
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
return mod.cli

@staticmethod
def _handle_obsolate_command(name):
if name == "platforms":
from platformio.commands import platform
return platform.cli
elif name == "serialports":
from platformio.commands import device
return device.cli
raise AttributeError()


@click.command(
cls=PlatformioCLI,
context_settings=dict(help_option_names=["-h", "--help"]))
@click.command(cls=PlatformioCLI,
context_settings=dict(help_option_names=["-h", "--help"]))
@click.version_option(__version__, prog_name="PlatformIO")
@click.option(
"--force",
"-f",
is_flag=True,
help="Force to accept any confirmation prompts.")
@click.option("--force",
"-f",
is_flag=True,
help="Force to accept any confirmation prompts.")
@click.option("--caller", "-c", help="Caller ID (service).")
@click.pass_context
def cli(ctx, force, caller):
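The PlatformioCLI class above (now imported from platformio.commands) is a click.MultiCommand that discovers sub-commands by scanning a package directory and importing each module lazily. A self-contained sketch of the same pattern; the package name mypkg and its commands/ layout are placeholders, not PlatformIO paths.

import os
from os.path import dirname, join

import click


class DynamicCLI(click.MultiCommand):
    """Discover sub-commands from mypkg/commands/<name>.py at run time."""

    def list_commands(self, ctx):
        cmds = []
        for filename in os.listdir(join(dirname(__file__), "commands")):
            if filename.endswith(".py") and not filename.startswith("__init__"):
                cmds.append(filename[:-3])
        return sorted(cmds)

    def get_command(self, ctx, cmd_name):
        try:
            # Each module is expected to expose a click command named "cli".
            mod = __import__("mypkg.commands." + cmd_name, None, None, ["cli"])
        except ImportError:
            raise click.UsageError('No such command "%s"' % cmd_name, ctx)
        return mod.cli


@click.command(cls=DynamicCLI,
               context_settings=dict(help_option_names=["-h", "--help"]))
def cli():
    """Top-level group; sub-commands are imported only when invoked."""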
@@ -80,8 +42,9 @@ def process_result(ctx, result, force, caller): # pylint: disable=W0613
maintenance.on_platformio_end(ctx, result)


@util.memoized()
def configure():
if "cygwin" in system().lower():
if CYGWIN:
raise exception.CygwinEnvDetected()

# https://urllib3.readthedocs.org
@@ -107,17 +70,24 @@ def configure():
try:
click_echo_origin[origin](*args, **kwargs)
except IOError:
(sys.stderr.write if kwargs.get("err") else
sys.stdout.write)("%s\n" % (args[0] if args else ""))
(sys.stderr.write if kwargs.get("err") else sys.stdout.write)(
"%s\n" % (args[0] if args else ""))

click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)


def main():
def main(argv=None):
exit_code = 0
prev_sys_argv = sys.argv[:]
if argv:
assert isinstance(argv, list)
sys.argv = argv
try:
configure()
cli(None, None, None)
except SystemExit:
pass
except Exception as e: # pylint: disable=broad-except
if not isinstance(e, exception.ReturnErrorCode):
maintenance.on_platformio_exception(e)
@@ -135,7 +105,7 @@ An unexpected error occurred. Further steps:
`pip install -U platformio` command

* Try to find answer in FAQ Troubleshooting section
http://docs.platformio.org/page/faq.html
https://docs.platformio.org/page/faq.html

* Report this problem to the developers
https://github.com/platformio/platformio-core/issues
@@ -143,13 +113,13 @@ An unexpected error occurred. Further steps:
============================================================
"""
click.secho(error_str, fg="red", err=True)
return int(str(e)) if str(e).isdigit() else 1
return 0
exit_code = int(str(e)) if str(e).isdigit() else 1
sys.argv = prev_sys_argv
return exit_code


def debug_gdb_main():
sys.argv = [sys.argv[0], "debug", "--interface", "gdb"] + sys.argv[1:]
return main()
return main([sys.argv[0], "debug", "--interface", "gdb"] + sys.argv[1:])


if __name__ == "__main__":
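The new main(argv=None) signature lets callers run the CLI in-process and get its exit code back, which is exactly how debug_gdb_main is rewritten above. A minimal usage sketch, assuming the module shown here is importable as platformio.__main__ (the chosen command is just an example):

from platformio.__main__ import main

# argv[0] stands in for the program name; the rest are ordinary CLI arguments.
# sys.argv is restored afterwards and the CLI exit code is returned.
exit_code = main(["platformio", "device", "list"])
print("platformio exited with code", exit_code)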
@@ -12,19 +12,35 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import codecs
import hashlib
import json
import os
import uuid
from copy import deepcopy
from os import environ, getenv, listdir, remove
from os.path import abspath, dirname, expanduser, getmtime, isdir, isfile, join
from os.path import abspath, dirname, expanduser, isdir, isfile, join
from time import time

import requests
from lockfile import LockFailed, LockFile

from platformio import __version__, exception, util
from platformio import exception, fs, lockfile
from platformio.compat import (WINDOWS, dump_json_to_unicode,
hashlib_encode_data)
from platformio.proc import is_ci
from platformio.project.helpers import (get_project_cache_dir,
get_project_core_dir)


def get_default_projects_dir():
docs_dir = join(expanduser("~"), "Documents")
try:
assert WINDOWS
import ctypes.wintypes
buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
ctypes.windll.shell32.SHGetFolderPathW(None, 5, None, 0, buf)
docs_dir = buf.value
except: # pylint: disable=bare-except
pass
return join(docs_dir, "PlatformIO", "Projects")


def projects_dir_validate(projects_dir):
@@ -57,16 +73,14 @@ DEFAULT_SETTINGS = {
"description": "Enable caching for API requests and Library Manager",
"value": True
},
"enable_ssl": {
"description": "Enable SSL for PlatformIO Services",
"strict_ssl": {
"description": "Strict SSL for PlatformIO Services",
"value": False
},
"enable_telemetry": {
"description":
("Telemetry service <http://docs.platformio.org/page/"
"userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
"value":
True
("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
"value": True
},
"force_verbose": {
"description": "Force verbose output when processing environments",
@@ -74,7 +88,7 @@ },
},
"projects_dir": {
"description": "Default location for PlatformIO projects (PIO Home)",
"value": join(expanduser("~"), "Documents", "PlatformIO", "Projects"),
"value": get_default_projects_dir(),
"validator": projects_dir_validate
},
}
@@ -88,51 +102,76 @@ class State(object):
self.path = path
self.lock = lock
if not self.path:
self.path = join(util.get_home_dir(), "appstate.json")
self._state = {}
self._prev_state = {}
self.path = join(get_project_core_dir(), "appstate.json")
self._storage = {}
self._lockfile = None
self.modified = False

def __enter__(self):
try:
self._lock_state_file()
if isfile(self.path):
self._state = util.load_json(self.path)
except exception.PlatformioException:
self._state = {}
self._prev_state = deepcopy(self._state)
return self._state
self._storage = fs.load_json(self.path)
assert isinstance(self._storage, dict)
except (AssertionError, ValueError, UnicodeDecodeError,
exception.InvalidJSONFile):
self._storage = {}
return self

def __exit__(self, type_, value, traceback):
if self._prev_state != self._state:
if self.modified:
try:
with open(self.path, "w") as fp:
if "dev" in __version__:
json.dump(self._state, fp, indent=4)
else:
json.dump(self._state, fp)
fp.write(dump_json_to_unicode(self._storage))
except IOError:
raise exception.HomeDirPermissionsError(util.get_home_dir())
raise exception.HomeDirPermissionsError(get_project_core_dir())
self._unlock_state_file()

def _lock_state_file(self):
if not self.lock:
return
self._lockfile = LockFile(self.path)

if self._lockfile.is_locked() and \
(time() - getmtime(self._lockfile.lock_file)) > 10:
self._lockfile.break_lock()

self._lockfile = lockfile.LockFile(self.path)
try:
self._lockfile.acquire()
except LockFailed:
except IOError:
raise exception.HomeDirPermissionsError(dirname(self.path))

def _unlock_state_file(self):
if self._lockfile:
if hasattr(self, "_lockfile") and self._lockfile:
self._lockfile.release()

def __del__(self):
self._unlock_state_file()

# Dictionary Proxy

def as_dict(self):
return self._storage

def get(self, key, default=True):
return self._storage.get(key, default)

def update(self, *args, **kwargs):
self.modified = True
return self._storage.update(*args, **kwargs)

def clear(self):
return self._storage.clear()

def __getitem__(self, key):
return self._storage[key]

def __setitem__(self, key, value):
self.modified = True
self._storage[key] = value

def __delitem__(self, key):
self.modified = True
del self._storage[key]

def __contains__(self, item):
return item in self._storage

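The reworked State above is a lock-protected, JSON-backed dictionary proxy that rewrites its appstate.json only when the modified flag was set by update, __setitem__ or __delitem__. A usage sketch, assuming this hunk belongs to a module importable as platformio.app (the key names are examples):

from platformio import app  # assumed module path for the code shown above

with app.State(lock=True) as state:
    state["last_version_check"] = 0   # __setitem__ sets state.modified
    if "cid" in state:                # __contains__ just reads the storage
        print(state["cid"])
# Leaving the with-block persists the file because modified is True;
# a read-only pass through the block leaves appstate.json untouched.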
class ContentCache(object):

@@ -141,7 +180,7 @@ class ContentCache(object):
self._db_path = None
self._lockfile = None

self.cache_dir = cache_dir or util.get_cache_dir()
self.cache_dir = cache_dir or get_project_cache_dir()
self._db_path = join(self.cache_dir, "db.data")

def __enter__(self):
@@ -154,15 +193,10 @@ class ContentCache(object):
def _lock_dbindex(self):
if not self.cache_dir:
os.makedirs(self.cache_dir)
self._lockfile = LockFile(self.cache_dir)
if self._lockfile.is_locked() and \
isfile(self._lockfile.lock_file) and \
(time() - getmtime(self._lockfile.lock_file)) > 10:
self._lockfile.break_lock()

self._lockfile = lockfile.LockFile(self.cache_dir)
try:
self._lockfile.acquire()
except LockFailed:
except: # pylint: disable=bare-except
return False

return True
@@ -173,25 +207,24 @@ class ContentCache(object):
return True

def get_cache_path(self, key):
key = str(key)
assert len(key) > 3
return join(self.cache_dir, key[-2:], key)

@staticmethod
def key_from_args(*args):
h = hashlib.md5()
for data in args:
h.update(str(data))
for arg in args:
if arg:
h.update(hashlib_encode_data(arg))
return h.hexdigest()

def get(self, key):
cache_path = self.get_cache_path(key)
if not isfile(cache_path):
return None
with open(cache_path, "rb") as fp:
data = fp.read()
if data and data[0] in ("{", "["):
return json.loads(data)
return data
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
return fp.read()

def set(self, key, data, valid):
if not get_setting("enable_cache"):
@@ -204,7 +237,7 @@ class ContentCache(object):
if not isdir(self.cache_dir):
os.makedirs(self.cache_dir)
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
assert valid.endswith(tuple(tdmap.keys()))
assert valid.endswith(tuple(tdmap))
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))

if not self._lock_dbindex():
@@ -212,13 +245,17 @@ class ContentCache(object):

if not isdir(dirname(cache_path)):
os.makedirs(dirname(cache_path))
with open(cache_path, "wb") as fp:
if isinstance(data, (dict, list)):
json.dump(data, fp)
else:
fp.write(str(data))
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
try:
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
fp.write(data)
with open(self._db_path, "a") as fp:
fp.write("%s=%s\n" % (str(expire_time), cache_path))
except UnicodeError:
if isfile(cache_path):
try:
remove(cache_path)
except OSError:
pass

return self._unlock_dbindex()

@@ -235,20 +272,23 @@ class ContentCache(object):
newlines = []
with open(self._db_path) as fp:
for line in fp.readlines():
line = line.strip()
if "=" not in line:
continue
line = line.strip()
expire, path = line.split("=")
if time() < int(expire) and isfile(path) and \
path not in paths_for_delete:
newlines.append(line)
continue
try:
if time() < int(expire) and isfile(path) and \
path not in paths_for_delete:
newlines.append(line)
continue
except ValueError:
pass
found = True
if isfile(path):
try:
remove(path)
if not listdir(dirname(path)):
util.rmtree_(dirname(path))
fs.rmtree(dirname(path))
except OSError:
pass

@@ -262,7 +302,7 @@ class ContentCache(object):
def clean(self):
if not self.cache_dir or not isdir(self.cache_dir):
return
util.rmtree_(self.cache_dir)
fs.rmtree(self.cache_dir)


def clean_cache():
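ContentCache.set above encodes its time-to-live as a short string such as "10s", "5m", "2h" or "1d" and converts it with tdmap into an absolute expiry timestamp that is appended to db.data. The conversion in isolation (the function name is mine, not from the codebase):

from time import time


def expire_timestamp(valid):
    """Turn a TTL string like "7d" into an absolute UNIX expiry time."""
    tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
    assert valid.endswith(tuple(tdmap))
    return int(time() + tdmap[valid[-1]] * int(valid[:-1]))


print(expire_timestamp("7d") - int(time()))  # ~604800 seconds, i.e. one week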
@@ -289,19 +329,20 @@ def sanitize_setting(name, value):

def get_state_item(name, default=None):
with State() as data:
return data.get(name, default)
with State() as state:
return state.get(name, default)


def set_state_item(name, value):
with State(lock=True) as data:
data[name] = value
with State(lock=True) as state:
state[name] = value
state.modified = True


def delete_state_item(name):
with State(lock=True) as data:
if name in data:
del data[name]
with State(lock=True) as state:
if name in state:
del state[name]


def get_setting(name):
@@ -309,24 +350,25 @@ def get_setting(name):
if _env_name in environ:
return sanitize_setting(name, getenv(_env_name))

with State() as data:
if "settings" in data and name in data['settings']:
return data['settings'][name]
with State() as state:
if "settings" in state and name in state['settings']:
return state['settings'][name]

return DEFAULT_SETTINGS[name]['value']


def set_setting(name, value):
with State(lock=True) as data:
if "settings" not in data:
data['settings'] = {}
data['settings'][name] = sanitize_setting(name, value)
with State(lock=True) as state:
if "settings" not in state:
state['settings'] = {}
state['settings'][name] = sanitize_setting(name, value)
state.modified = True


def reset_settings():
with State(lock=True) as data:
if "settings" in data:
del data['settings']
with State(lock=True) as state:
if "settings" in state:
del state['settings']


def get_session_var(name, default=None):
@@ -341,28 +383,29 @@ def set_session_var(name, value):

def is_disabled_progressbar():
return any([
get_session_var("force_option"),
util.is_ci(),
is_ci(),
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
])


def get_cid():
cid = get_state_item("cid")
if not cid:
_uid = None
if getenv("C9_UID"):
_uid = getenv("C9_UID")
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
try:
_uid = requests.get("{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"))).json().get("id")
except: # pylint: disable=bare-except
pass
cid = str(
uuid.UUID(
bytes=hashlib.md5(str(_uid if _uid else uuid.getnode()))
.digest()))
if "windows" in util.get_systype() or os.getuid() > 0:
set_state_item("cid", cid)
if cid:
return cid
uid = None
if getenv("C9_UID"):
uid = getenv("C9_UID")
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
try:
uid = requests.get("{api}/user?token={token}".format(
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
token=getenv("USER_TOKEN"))).json().get("id")
except: # pylint: disable=bare-except
pass
if not uid:
uid = uuid.getnode()
cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
cid = str(cid)
if WINDOWS or os.getuid() > 0: # yapf: disable pylint: disable=no-member
set_state_item("cid", cid)
return cid
@@ -12,101 +12,74 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import base64
|
||||
import json
|
||||
import sys
|
||||
from os import environ
|
||||
from os.path import expanduser, join
|
||||
from os import environ, makedirs
|
||||
from os.path import isdir, join
|
||||
from time import time
|
||||
|
||||
from SCons.Script import (ARGUMENTS, COMMAND_LINE_TARGETS, DEFAULT_TARGETS,
|
||||
Action, AllowSubstExceptions, AlwaysBuild,
|
||||
DefaultEnvironment, Variables)
|
||||
import click
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import DEFAULT_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import AllowSubstExceptions # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
from SCons.Script import Default # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Import # pylint: disable=import-error
|
||||
from SCons.Script import Variables # pylint: disable=import-error
|
||||
|
||||
from platformio import util
|
||||
from platformio import fs
|
||||
from platformio.compat import PY2, dump_json_to_unicode
|
||||
from platformio.managers.platform import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project import helpers as project_helpers
|
||||
|
||||
AllowSubstExceptions(NameError)
|
||||
|
||||
# allow common variables from INI file
|
||||
commonvars = Variables(None)
|
||||
commonvars.AddVariables(
|
||||
# append CLI arguments to build environment
|
||||
clivars = Variables(None)
|
||||
clivars.AddVariables(
|
||||
("PLATFORM_MANIFEST",),
|
||||
("BUILD_SCRIPT",),
|
||||
("EXTRA_SCRIPTS",),
|
||||
("PROJECT_CONFIG",),
|
||||
("PIOENV",),
|
||||
("PIOTEST",),
|
||||
("PIOPLATFORM",),
|
||||
("PIOFRAMEWORK",),
|
||||
|
||||
# build options
|
||||
("BUILD_FLAGS",),
|
||||
("SRC_BUILD_FLAGS",),
|
||||
("BUILD_UNFLAGS",),
|
||||
("SRC_FILTER",),
|
||||
|
||||
# library options
|
||||
("LIB_LDF_MODE",),
|
||||
("LIB_COMPAT_MODE",),
|
||||
("LIB_DEPS",),
|
||||
("LIB_IGNORE",),
|
||||
("LIB_EXTRA_DIRS",),
|
||||
("LIB_ARCHIVE",),
|
||||
|
||||
# board options
|
||||
("BOARD",),
|
||||
("BOARD_MCU",),
|
||||
("BOARD_F_CPU",),
|
||||
("BOARD_F_FLASH",),
|
||||
("BOARD_FLASH_MODE",),
|
||||
|
||||
# upload options
|
||||
("UPLOAD_PORT",),
|
||||
("UPLOAD_PROTOCOL",),
|
||||
("UPLOAD_SPEED",),
|
||||
("UPLOAD_FLAGS",),
|
||||
("UPLOAD_RESETMETHOD",),
|
||||
|
||||
# debug options
|
||||
("DEBUG_TOOL",),
|
||||
|
||||
|
||||
("PIOTEST_RUNNING_NAME",),
|
||||
("UPLOAD_PORT",)
|
||||
) # yapf: disable
|
||||
|
||||
MULTILINE_VARS = [
|
||||
"EXTRA_SCRIPTS", "PIOFRAMEWORK", "BUILD_FLAGS", "SRC_BUILD_FLAGS",
|
||||
"BUILD_UNFLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE", "LIB_EXTRA_DIRS"
|
||||
]
|
||||
|
||||
DEFAULT_ENV_OPTIONS = dict(
|
||||
tools=[
|
||||
"ar", "gas", "gcc", "g++", "gnulink", "platformio", "pioplatform",
|
||||
"piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
|
||||
], # yapf: disable
|
||||
toolpath=[join(util.get_source_dir(), "builder", "tools")],
|
||||
variables=commonvars,
|
||||
"pioproject", "piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
|
||||
],
|
||||
toolpath=[join(fs.get_source_dir(), "builder", "tools")],
|
||||
variables=clivars,
|
||||
|
||||
# Propagating External Environment
|
||||
PIOVARIABLES=commonvars.keys(),
|
||||
ENV=environ,
|
||||
UNIX_TIME=int(time()),
|
||||
PIOHOME_DIR=util.get_home_dir(),
|
||||
PROJECT_DIR=util.get_project_dir(),
|
||||
PROJECTINCLUDE_DIR=util.get_projectinclude_dir(),
|
||||
PROJECTSRC_DIR=util.get_projectsrc_dir(),
|
||||
PROJECTTEST_DIR=util.get_projecttest_dir(),
|
||||
PROJECTDATA_DIR=util.get_projectdata_dir(),
|
||||
PROJECTBUILD_DIR=util.get_projectbuild_dir(),
|
||||
PROJECT_DIR=project_helpers.get_project_dir(),
|
||||
PROJECTCORE_DIR=project_helpers.get_project_core_dir(),
|
||||
PROJECTPACKAGES_DIR=project_helpers.get_project_packages_dir(),
|
||||
PROJECTWORKSPACE_DIR=project_helpers.get_project_workspace_dir(),
|
||||
PROJECTLIBDEPS_DIR=project_helpers.get_project_libdeps_dir(),
|
||||
PROJECTINCLUDE_DIR=project_helpers.get_project_include_dir(),
|
||||
PROJECTSRC_DIR=project_helpers.get_project_src_dir(),
|
||||
PROJECTTEST_DIR=project_helpers.get_project_test_dir(),
|
||||
PROJECTDATA_DIR=project_helpers.get_project_data_dir(),
|
||||
PROJECTBUILD_DIR=project_helpers.get_project_build_dir(),
|
||||
BUILDCACHE_DIR=project_helpers.get_project_optional_dir("build_cache_dir"),
|
||||
BUILD_DIR=join("$PROJECTBUILD_DIR", "$PIOENV"),
|
||||
BUILDSRC_DIR=join("$BUILD_DIR", "src"),
|
||||
BUILDTEST_DIR=join("$BUILD_DIR", "test"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
LIBSOURCE_DIRS=[
|
||||
util.get_projectlib_dir(),
|
||||
util.get_projectlibdeps_dir(),
|
||||
join("$PIOHOME_DIR", "lib")
|
||||
project_helpers.get_project_lib_dir(),
|
||||
join("$PROJECTLIBDEPS_DIR", "$PIOENV"),
|
||||
project_helpers.get_project_global_lib_dir()
|
||||
],
|
||||
PROGNAME="program",
|
||||
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PYTHONEXE=util.get_pythonexe_path())
|
||||
PYTHONEXE=get_pythonexe_path())
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
DEFAULT_ENV_OPTIONS['ARCOMSTR'] = "Archiving $TARGET"
|
||||
@@ -117,76 +90,78 @@ if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
|
||||
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
|
||||
|
||||
# decode common variables
|
||||
for k in commonvars.keys():
|
||||
if k in env:
|
||||
env[k] = base64.b64decode(env[k])
|
||||
if k in MULTILINE_VARS:
|
||||
env[k] = util.parse_conf_multi_values(env[k])
|
||||
# Load variables from CLI
|
||||
env.Replace(
|
||||
**{
|
||||
key: PlatformBase.decode_scons_arg(env[key])
|
||||
for key in list(clivars.keys()) if key in env
|
||||
})
|
||||
|
||||
if env.subst("$BUILDCACHE_DIR"):
|
||||
if not isdir(env.subst("$BUILDCACHE_DIR")):
|
||||
makedirs(env.subst("$BUILDCACHE_DIR"))
|
||||
env.CacheDir("$BUILDCACHE_DIR")
|
||||
|
||||
if int(ARGUMENTS.get("ISATTY", 0)):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
|
||||
if env.GetOption('clean'):
|
||||
env.PioClean(env.subst("$BUILD_DIR"))
|
||||
env.Exit(0)
|
||||
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print "Verbose mode can be enabled via `-v, --verbose` option"
|
||||
print("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
# Handle custom variables from system environment
|
||||
for var in ("BUILD_FLAGS", "SRC_BUILD_FLAGS", "SRC_FILTER", "EXTRA_SCRIPTS",
|
||||
"UPLOAD_PORT", "UPLOAD_FLAGS", "LIB_EXTRA_DIRS"):
|
||||
k = "PLATFORMIO_%s" % var
|
||||
if k not in environ:
|
||||
continue
|
||||
if var in ("UPLOAD_PORT", ):
|
||||
env[var] = environ.get(k)
|
||||
continue
|
||||
env.Append(**{var: util.parse_conf_multi_values(environ.get(k))})
|
||||
|
||||
# Configure extra library source directories for LDF
|
||||
if util.get_project_optional_dir("lib_extra_dirs"):
|
||||
env.Prepend(
|
||||
LIBSOURCE_DIRS=util.parse_conf_multi_values(
|
||||
util.get_project_optional_dir("lib_extra_dirs")))
|
||||
env.Prepend(LIBSOURCE_DIRS=env.get("LIB_EXTRA_DIRS", []))
|
||||
env['LIBSOURCE_DIRS'] = [
|
||||
expanduser(d) if d.startswith("~") else d for d in env['LIBSOURCE_DIRS']
|
||||
]
|
||||
|
||||
env.LoadPioPlatform(commonvars)
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
env.SConscriptChdir(0)
|
||||
env.SConsignFile(join("$PROJECTBUILD_DIR", ".sconsign.dblite"))
|
||||
env.SConsignFile(
|
||||
join("$PROJECTBUILD_DIR",
|
||||
".sconsign.dblite" if PY2 else ".sconsign3.dblite"))
|
||||
|
||||
for item in env.GetPreExtraScripts():
|
||||
for item in env.GetExtraScripts("pre"):
|
||||
env.SConscript(item, exports="env")
|
||||
|
||||
env.SConscript("$BUILD_SCRIPT")
|
||||
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS + ["size"]))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS + ["size"]))
|
||||
|
||||
if "UPLOAD_FLAGS" in env:
|
||||
env.Append(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
|
||||
env.Prepend(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
|
||||
if env.GetProjectOption("upload_command"):
|
||||
env.Replace(UPLOADCMD=env.GetProjectOption("upload_command"))
|
||||
|
||||
for item in env.GetPostExtraScripts():
|
||||
for item in env.GetExtraScripts("post"):
|
||||
env.SConscript(item, exports="env")
|
||||
|
||||
##############################################################################
|
||||
|
||||
# Checking program size
|
||||
if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.Depends(["upload", "program"], "checkprogsize")
|
||||
# Replace platform's "size" target with our
|
||||
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
|
||||
Default(None)
|
||||
Default(_new_targets)
|
||||
Default("checkprogsize")
|
||||
|
||||
# Print configured protocols
|
||||
env.AddPreAction(["upload", "program"],
|
||||
env.VerboseAction(
|
||||
lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol..."))
|
||||
|
||||
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
|
||||
|
||||
##############################################################################
|
||||
|
||||
if "envdump" in COMMAND_LINE_TARGETS:
|
||||
print env.Dump()
|
||||
print(env.Dump())
|
||||
env.Exit(0)
|
||||
|
||||
if "idedata" in COMMAND_LINE_TARGETS:
|
||||
try:
|
||||
print "\n%s\n" % util.path_to_unicode(
|
||||
json.dumps(env.DumpIDEData(), ensure_ascii=False))
|
||||
env.Exit(0)
|
||||
except UnicodeDecodeError:
|
||||
sys.stderr.write(
|
||||
"\nUnicodeDecodeError: Non-ASCII characters found in build "
|
||||
"environment\n"
|
||||
"See explanation in FAQ > Troubleshooting > Building\n"
|
||||
"http://docs.platformio.org/page/faq.html\n\n")
|
||||
env.Exit(1)
|
||||
|
||||
env.AddPreAction(["upload", "program"],
|
||||
Action(lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol..."))
|
||||
Import("projenv")
|
||||
print("\n%s\n" % dump_json_to_unicode(
|
||||
env.DumpIDEData(projenv) # pylint: disable=undefined-variable
|
||||
))
|
||||
env.Exit(0)
|
||||
|
||||
@@ -16,19 +16,20 @@ from __future__ import absolute_import
|
||||
|
||||
from glob import glob
|
||||
from os import environ
|
||||
from os.path import join
|
||||
from os.path import abspath, isfile, join
|
||||
|
||||
from SCons.Defaults import processDefines
|
||||
from SCons.Defaults import processDefines # pylint: disable=import-error
|
||||
|
||||
from platformio import util
|
||||
from platformio.compat import glob_escape
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
from platformio.proc import exec_command, where_is_program
|
||||
|
||||
|
||||
def _dump_includes(env):
|
||||
def _dump_includes(env, projenv):
|
||||
includes = []
|
||||
|
||||
for item in env.get("CPPPATH", []):
|
||||
includes.append(env.subst(item))
|
||||
for item in projenv.get("CPPPATH", []):
|
||||
includes.append(projenv.subst(item))
|
||||
|
||||
# installed libs
|
||||
for lb in env.GetLibBuilders():
|
||||
@@ -39,7 +40,7 @@ def _dump_includes(env):
|
||||
for name in p.get_installed_packages():
|
||||
if p.get_package_type(name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = util.glob_escape(p.get_package_dir(name))
|
||||
toolchain_dir = glob_escape(p.get_package_dir(name))
|
||||
toolchain_incglobs = [
|
||||
join(toolchain_dir, "*", "include*"),
|
||||
join(toolchain_dir, "*", "include", "c++", "*"),
|
||||
@@ -53,11 +54,15 @@ def _dump_includes(env):
|
||||
if unity_dir:
|
||||
includes.append(unity_dir)
|
||||
|
||||
# remove dupicates
|
||||
includes.extend(
|
||||
[env.subst("$PROJECTINCLUDE_DIR"),
|
||||
env.subst("$PROJECTSRC_DIR")])
|
||||
|
||||
# remove duplicates
|
||||
result = []
|
||||
for item in includes:
|
||||
if item not in result:
|
||||
result.append(item)
|
||||
result.append(abspath(item))
|
||||
|
||||
return result
|
||||
|
||||
@@ -67,8 +72,9 @@ def _get_gcc_defines(env):
|
||||
try:
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command(
|
||||
"echo | %s -dM -E -" % env.subst("$CC"), env=sysenv, shell=True)
|
||||
result = exec_command("echo | %s -dM -E -" % env.subst("$CC"),
|
||||
env=sysenv,
|
||||
shell=True)
|
||||
except OSError:
|
||||
return items
|
||||
if result['returncode'] != 0:
|
||||
@@ -97,39 +103,66 @@ def _dump_defines(env):
|
||||
board_mcu = env.BoardConfig().get("build.mcu")
|
||||
if board_mcu:
|
||||
defines.append(
|
||||
str("__AVR_%s__" % board_mcu.upper()
|
||||
.replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
|
||||
str("__AVR_%s__" % board_mcu.upper().replace(
|
||||
"ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
|
||||
|
||||
# built-in GCC marcos
|
||||
if env.GetCompilerType() == "gcc":
|
||||
defines.extend(_get_gcc_defines(env))
|
||||
# if env.GetCompilerType() == "gcc":
|
||||
# defines.extend(_get_gcc_defines(env))
|
||||
|
||||
return defines
|
||||
|
||||
|
||||
def DumpIDEData(env):
|
||||
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
|
||||
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
|
||||
def _get_svd_path(env):
|
||||
svd_path = env.GetProjectOption("debug_svd_path")
|
||||
if svd_path:
|
||||
return abspath(svd_path)
|
||||
|
||||
if "BOARD" not in env:
|
||||
return None
|
||||
try:
|
||||
svd_path = env.BoardConfig().get("debug.svd_path")
|
||||
assert svd_path
|
||||
except (AssertionError, KeyError):
|
||||
return None
|
||||
# custom path to SVD file
|
||||
if isfile(svd_path):
|
||||
return svd_path
|
||||
# default file from ./platform/misc/svd folder
|
||||
p = env.PioPlatform()
|
||||
if isfile(join(p.get_dir(), "misc", "svd", svd_path)):
|
||||
return abspath(join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return None
|
||||
|
||||
|
||||
def DumpIDEData(env, projenv):
|
||||
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS"
|
||||
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS"
|
||||
|
||||
data = {
|
||||
"libsource_dirs":
|
||||
[env.subst(l) for l in env.get("LIBSOURCE_DIRS", [])],
|
||||
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
|
||||
"defines":
|
||||
_dump_defines(env),
|
||||
"includes":
|
||||
_dump_includes(env),
|
||||
_dump_includes(env, projenv),
|
||||
"cc_flags":
|
||||
env.subst(LINTCCOM),
|
||||
"cxx_flags":
|
||||
env.subst(LINTCXXCOM),
|
||||
"cc_path":
|
||||
util.where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
"cxx_path":
|
||||
util.where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
"gdb_path":
|
||||
util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
"prog_path":
|
||||
env.subst("$PROG_PATH"),
|
||||
"flash_extra_images": [{
|
||||
"offset": item[0],
|
||||
"path": env.subst(item[1])
|
||||
} for item in env.get("FLASH_EXTRA_IMAGES", [])],
|
||||
"svd_path":
|
||||
_get_svd_path(env),
|
||||
"compiler_type":
|
||||
env.GetCompilerType()
|
||||
}
|
||||
|
||||
@@ -12,45 +12,51 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=no-member, no-self-use, unused-argument
|
||||
# pylint: disable=no-member, no-self-use, unused-argument, too-many-lines
|
||||
# pylint: disable=too-many-instance-attributes, too-many-public-methods
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import codecs
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from glob import glob
|
||||
from os.path import (basename, commonprefix, dirname, isdir, isfile, join,
|
||||
from os.path import (basename, commonprefix, expanduser, isdir, isfile, join,
|
||||
realpath, sep)
|
||||
|
||||
import SCons.Scanner
|
||||
from SCons.Script import ARGUMENTS, COMMAND_LINE_TARGETS, DefaultEnvironment
|
||||
import click
|
||||
import SCons.Scanner # pylint: disable=import-error
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
|
||||
from platformio import util
|
||||
from platformio import exception, fs, util
|
||||
from platformio.builder.tools import platformio as piotool
|
||||
from platformio.compat import (WINDOWS, get_file_contents, hashlib_encode_data,
|
||||
string_types)
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.managers.package import PackageManager
|
||||
|
||||
|
||||
class LibBuilderFactory(object):
|
||||
|
||||
@staticmethod
|
||||
def new(env, path, verbose=False):
|
||||
def new(env, path, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
|
||||
clsname = "UnknownLibBuilder"
|
||||
if isfile(join(path, "library.json")):
|
||||
clsname = "PlatformIOLibBuilder"
|
||||
else:
|
||||
used_frameworks = LibBuilderFactory.get_used_frameworks(env, path)
|
||||
common_frameworks = (
|
||||
set(env.get("PIOFRAMEWORK", [])) & set(used_frameworks))
|
||||
common_frameworks = (set(env.get("PIOFRAMEWORK", []))
|
||||
& set(used_frameworks))
|
||||
if common_frameworks:
|
||||
clsname = "%sLibBuilder" % list(common_frameworks)[0].title()
|
||||
elif used_frameworks:
|
||||
clsname = "%sLibBuilder" % used_frameworks[0].title()
|
||||
|
||||
obj = getattr(sys.modules[__name__], clsname)(
|
||||
env, path, verbose=verbose)
|
||||
obj = getattr(sys.modules[__name__], clsname)(env,
|
||||
path,
|
||||
verbose=verbose)
|
||||
assert isinstance(obj, LibBuilderBase)
|
||||
return obj
|
||||
|
||||
@@ -64,30 +70,34 @@ class LibBuilderFactory(object):
|
||||
if isfile(join(path, "module.json")):
|
||||
return ["mbed"]
|
||||
|
||||
include_re = re.compile(r'^#include\s+(<|")(Arduino|mbed)\.h(<|")',
|
||||
flags=re.MULTILINE)
|
||||
|
||||
# check source files
|
||||
for root, _, files in os.walk(path, followlinks=True):
|
||||
if "mbed_lib.json" in files:
|
||||
return ["mbed"]
|
||||
for fname in files:
|
||||
if not env.IsFileWithExt(
|
||||
if not fs.path_endswith_ext(
|
||||
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT):
|
||||
continue
|
||||
with open(join(root, fname)) as f:
|
||||
content = f.read()
|
||||
if "Arduino.h" in content:
|
||||
return ["arduino"]
|
||||
elif "mbed.h" in content:
|
||||
return ["mbed"]
|
||||
content = get_file_contents(join(root, fname))
|
||||
if not content:
|
||||
continue
|
||||
if "Arduino.h" in content and include_re.search(content):
|
||||
return ["arduino"]
|
||||
if "mbed.h" in content and include_re.search(content):
|
||||
return ["mbed"]
|
||||
return []
|
||||
|
||||
|
||||
class LibBuilderBase(object):
|
||||
|
||||
IS_WINDOWS = "windows" in util.get_systype()
|
||||
|
||||
LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"]
|
||||
LDF_MODE_DEFAULT = "chain"
|
||||
|
||||
COMPAT_MODES = ["off", "light", "strict"]
|
||||
COMPAT_MODE_DEFAULT = "light"
|
||||
COMPAT_MODES = ["off", "soft", "strict"]
|
||||
COMPAT_MODE_DEFAULT = "soft"
|
||||
|
||||
CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
|
||||
CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
|
||||
@@ -125,9 +135,11 @@ class LibBuilderBase(object):
|
||||
def __contains__(self, path):
|
||||
p1 = self.path
|
||||
p2 = path
|
||||
if self.IS_WINDOWS:
|
||||
if WINDOWS:
|
||||
p1 = p1.lower()
|
||||
p2 = p2.lower()
|
||||
if p1 == p2:
|
||||
return True
|
||||
return commonprefix((p1 + sep, p2)) == p1 + sep
|
||||
|
||||
@property
|
||||
@@ -138,13 +150,6 @@ class LibBuilderBase(object):
|
||||
def version(self):
|
||||
return self._manifest.get("version")
|
||||
|
||||
@property
|
||||
def vcs_info(self):
|
||||
items = glob(join(self.path, ".*", PackageManager.SRC_MANIFEST_NAME))
|
||||
if not items:
|
||||
return None
|
||||
return util.load_json(items[0])
|
||||
|
||||
@property
|
||||
def dependencies(self):
|
||||
return LibraryManager.normalize_dependencies(
|
||||
@@ -171,17 +176,17 @@ class LibBuilderBase(object):
|
||||
if isdir(join(self.path, "src")) else self.path)
|
||||
|
||||
def get_include_dirs(self):
|
||||
items = [self.src_dir]
|
||||
items = []
|
||||
include_dir = self.include_dir
|
||||
if include_dir and include_dir not in items:
|
||||
if include_dir:
|
||||
items.append(include_dir)
|
||||
items.append(self.src_dir)
|
||||
return items
|
||||
|
||||
@property
|
||||
def build_dir(self):
|
||||
return join("$BUILD_DIR",
|
||||
"lib%s" % hashlib.sha1(self.path).hexdigest()[:3],
|
||||
basename(self.path))
|
||||
lib_hash = hashlib.sha1(hashlib_encode_data(self.path)).hexdigest()[:3]
|
||||
return join("$BUILD_DIR", "lib%s" % lib_hash, basename(self.path))
|
||||
|
||||
@property
|
||||
def build_flags(self):
|
||||
@@ -195,20 +200,6 @@ class LibBuilderBase(object):
|
||||
def extra_script(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def lib_archive(self):
|
||||
return self.env.get("LIB_ARCHIVE", "") != "false"
|
||||
|
||||
@property
|
||||
def lib_ldf_mode(self):
|
||||
return self.validate_ldf_mode(
|
||||
self.env.get("LIB_LDF_MODE", self.LDF_MODE_DEFAULT))
|
||||
|
||||
@property
|
||||
def lib_compat_mode(self):
|
||||
return self.validate_compat_mode(
|
||||
self.env.get("LIB_COMPAT_MODE", self.COMPAT_MODE_DEFAULT))
|
||||
|
||||
@property
|
||||
def depbuilders(self):
|
||||
return self._depbuilders
|
||||
@@ -221,9 +212,17 @@ class LibBuilderBase(object):
|
||||
def is_built(self):
|
||||
return self._is_built
|
||||
|
||||
@property
|
||||
def lib_archive(self):
|
||||
return self.env.GetProjectOption("lib_archive", True)
|
||||
|
||||
@property
|
||||
def lib_ldf_mode(self):
|
||||
return self.env.GetProjectOption("lib_ldf_mode", self.LDF_MODE_DEFAULT)
|
||||
|
||||
@staticmethod
|
||||
def validate_ldf_mode(mode):
|
||||
if isinstance(mode, basestring):
|
||||
if isinstance(mode, string_types):
|
||||
mode = mode.strip().lower()
|
||||
if mode in LibBuilderBase.LDF_MODES:
|
||||
return mode
|
||||
@@ -233,9 +232,14 @@ class LibBuilderBase(object):
|
||||
pass
|
||||
return LibBuilderBase.LDF_MODE_DEFAULT
|
||||
|
||||
@property
|
||||
def lib_compat_mode(self):
|
||||
return self.env.GetProjectOption("lib_compat_mode",
|
||||
self.COMPAT_MODE_DEFAULT)
|
||||
|
||||
@staticmethod
|
||||
def validate_compat_mode(mode):
|
||||
if isinstance(mode, basestring):
|
||||
if isinstance(mode, string_types):
|
||||
mode = mode.strip().lower()
|
||||
if mode in LibBuilderBase.COMPAT_MODES:
|
||||
return mode
|
||||
@@ -255,56 +259,33 @@ class LibBuilderBase(object):
|
||||
return {}
|
||||
|
||||
def process_extra_options(self):
|
||||
with util.cd(self.path):
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
with fs.cd(self.path):
|
||||
self.env.ProcessFlags(self.build_flags)
|
||||
if self.extra_script:
|
||||
self.env.SConscriptChdir(1)
|
||||
self.env.SConscript(
|
||||
realpath(self.extra_script),
|
||||
exports={
|
||||
"env": self.env,
|
||||
"pio_lib_builder": self
|
||||
})
|
||||
self.env.SConscript(realpath(self.extra_script),
|
||||
exports={
|
||||
"env": self.env,
|
||||
"pio_lib_builder": self
|
||||
})
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
|
||||
def process_dependencies(self):
|
||||
if not self.dependencies:
|
||||
return
|
||||
for item in self.dependencies:
|
||||
skip = False
|
||||
for key in ("platforms", "frameworks"):
|
||||
env_key = "PIO" + key.upper()[:-1]
|
||||
if env_key not in self.env:
|
||||
continue
|
||||
if (key in item and
|
||||
not util.items_in_list(self.env[env_key], item[key])):
|
||||
if self.verbose:
|
||||
sys.stderr.write(
|
||||
"Skip %s incompatible dependency %s\n" % (key[:-1],
|
||||
item))
|
||||
skip = True
|
||||
if skip:
|
||||
continue
|
||||
|
||||
found = False
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if item['name'] != lb.name:
|
||||
continue
|
||||
elif "frameworks" in item and \
|
||||
not lb.is_frameworks_compatible(item["frameworks"]):
|
||||
continue
|
||||
elif "platforms" in item and \
|
||||
not lb.is_platforms_compatible(item["platforms"]):
|
||||
continue
|
||||
found = True
|
||||
self.depend_recursive(lb)
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
break
|
||||
|
||||
if not found:
|
||||
sys.stderr.write(
|
||||
"Error: Could not find `%s` dependency for `%s` "
|
||||
"library\n" % (item['name'], self.name))
|
||||
self.env.Exit(1)
|
||||
if not found and self.verbose:
|
||||
sys.stderr.write("Warning: Ignored `%s` dependency for `%s` "
|
||||
"library\n" % (item['name'], self.name))
|
||||
|
||||
def get_search_files(self):
|
||||
items = [
|
||||
@@ -348,7 +329,7 @@ class LibBuilderBase(object):
|
||||
for path in self._validate_search_files(search_files):
|
||||
try:
|
||||
assert "+" in self.lib_ldf_mode
|
||||
incs = LibBuilderBase.CCONDITIONAL_SCANNER(
|
||||
candidates = LibBuilderBase.CCONDITIONAL_SCANNER(
|
||||
self.env.File(path),
|
||||
self.env,
|
||||
tuple(include_dirs),
|
||||
@@ -358,26 +339,26 @@ class LibBuilderBase(object):
|
||||
sys.stderr.write(
|
||||
"Warning! Classic Pre Processor is used for `%s`, "
|
||||
"advanced has failed with `%s`\n" % (path, e))
|
||||
_incs = LibBuilderBase.CLASSIC_SCANNER(
|
||||
candidates = LibBuilderBase.CLASSIC_SCANNER(
|
||||
self.env.File(path), self.env, tuple(include_dirs))
|
||||
incs = []
|
||||
for inc in _incs:
|
||||
incs.append(inc)
|
||||
if not self.PARSE_SRC_BY_H_NAME:
|
||||
|
||||
# print(path, map(lambda n: n.get_abspath(), candidates))
|
||||
for item in candidates:
|
||||
if item not in result:
|
||||
result.append(item)
|
||||
if not self.PARSE_SRC_BY_H_NAME:
|
||||
continue
|
||||
_h_path = item.get_abspath()
|
||||
if not fs.path_endswith_ext(_h_path, piotool.SRC_HEADER_EXT):
|
||||
continue
|
||||
_f_part = _h_path[:_h_path.rindex(".")]
|
||||
for ext in piotool.SRC_C_EXT:
|
||||
if not isfile("%s.%s" % (_f_part, ext)):
|
||||
continue
|
||||
_h_path = inc.get_abspath()
|
||||
if not self.env.IsFileWithExt(_h_path,
|
||||
piotool.SRC_HEADER_EXT):
|
||||
continue
|
||||
_f_part = _h_path[:_h_path.rindex(".")]
|
||||
for ext in piotool.SRC_C_EXT:
|
||||
if isfile("%s.%s" % (_f_part, ext)):
|
||||
incs.append(
|
||||
self.env.File("%s.%s" % (_f_part, ext)))
|
||||
# print path, map(lambda n: n.get_abspath(), incs)
|
||||
for inc in incs:
|
||||
if inc not in result:
|
||||
result.append(inc)
|
||||
_c_path = self.env.File("%s.%s" % (_f_part, ext))
|
||||
if _c_path not in result:
|
||||
result.append(_c_path)
|
||||
|
||||
return result
|
||||
|
||||
def depend_recursive(self, lb, search_files=None):
|
||||
@@ -395,8 +376,8 @@ class LibBuilderBase(object):
|
||||
if _already_depends(lb):
|
||||
if self.verbose:
|
||||
sys.stderr.write("Warning! Circular dependencies detected "
|
||||
"between `%s` and `%s`\n" % (self.path,
|
||||
lb.path))
|
||||
"between `%s` and `%s`\n" %
|
||||
(self.path, lb.path))
|
||||
self._circular_deps.append(lb)
|
||||
elif lb not in self._depbuilders:
|
||||
self._depbuilders.append(lb)
|
||||
@@ -433,23 +414,23 @@ class LibBuilderBase(object):
|
||||
libs.extend(lb.build())
|
||||
# copy shared information to self env
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.AppendUnique(**{key: lb.env.get(key)})
|
||||
self.env.PrependUnique(**{key: lb.env.get(key)})
|
||||
|
||||
for lb in self._circular_deps:
|
||||
self.env.AppendUnique(CPPPATH=lb.get_include_dirs())
|
||||
self.env.PrependUnique(CPPPATH=lb.get_include_dirs())
|
||||
|
||||
if self._is_built:
|
||||
return libs
|
||||
self._is_built = True
|
||||
|
||||
self.env.AppendUnique(CPPPATH=self.get_include_dirs())
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
|
||||
if self.lib_ldf_mode == "off":
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if self == lb or not lb.is_built:
|
||||
continue
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.AppendUnique(**{key: lb.env.get(key)})
|
||||
self.env.PrependUnique(**{key: lb.env.get(key)})
|
||||
|
||||
if self.lib_archive:
|
||||
libs.append(
|
||||
@@ -471,7 +452,8 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
manifest = {}
|
||||
if not isfile(join(self.path, "library.properties")):
|
||||
return manifest
|
||||
with open(join(self.path, "library.properties")) as fp:
|
||||
manifest_path = join(self.path, "library.properties")
|
||||
with codecs.open(manifest_path, encoding="utf-8") as fp:
|
||||
for line in fp.readlines():
|
||||
if "=" not in line:
|
||||
continue
|
||||
@@ -489,11 +471,29 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
if isdir(join(self.path, "src")):
|
||||
return LibBuilderBase.src_filter.fget(self)
|
||||
src_dir = join(self.path, "src")
|
||||
if isdir(src_dir):
|
||||
src_filter = LibBuilderBase.src_filter.fget(self)
|
||||
for root, _, files in os.walk(src_dir, followlinks=True):
|
||||
found = False
|
||||
for fname in files:
|
||||
if fname.lower().endswith("asm"):
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
continue
|
||||
rel_path = root.replace(src_dir, "")
|
||||
if rel_path.startswith(sep):
|
||||
rel_path = rel_path[1:] + sep
|
||||
src_filter.append("-<%s*.[aA][sS][mM]>" % rel_path)
|
||||
return src_filter
|
||||
|
||||
src_filter = []
|
||||
is_utility = isdir(join(self.path, "utility"))
|
||||
for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
|
||||
# arduino ide ignores files with .asm or .ASM extensions
|
||||
if ext.lower() == "asm":
|
||||
continue
|
||||
src_filter.append("+<*.%s>" % ext)
|
||||
if is_utility:
|
||||
src_filter.append("+<utility%s*.%s>" % (sep, ext))
|
||||
@@ -502,13 +502,36 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, ["arduino", "energia"])
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
platforms_map = {
|
||||
"avr": ["atmelavr"],
|
||||
"sam": ["atmelsam"],
|
||||
"samd": ["atmelsam"],
|
||||
"esp8266": ["espressif8266"],
|
||||
"esp32": ["espressif32"],
|
||||
"arc32": ["intel_arc32"],
|
||||
"stm32": ["ststm32"],
|
||||
"nrf5": ["nordicnrf51", "nordicnrf52"]
|
||||
}
|
||||
items = []
|
||||
for arch in self._manifest.get("architectures", "").split(","):
|
||||
arch = arch.strip().lower()
|
||||
if arch == "*":
|
||||
items = "*"
|
||||
break
|
||||
if arch in platforms_map:
|
||||
items.extend(platforms_map[arch])
|
||||
if not items:
|
||||
return LibBuilderBase.is_platforms_compatible(self, platforms)
|
||||
return util.items_in_list(platforms, items)
|
||||
|
||||
|
||||
class MbedLibBuilder(LibBuilderBase):
|
||||
|
||||
def load_manifest(self):
|
||||
if not isfile(join(self.path, "module.json")):
|
||||
return {}
|
||||
return util.load_json(join(self.path, "module.json"))
|
||||
return fs.load_json(join(self.path, "module.json"))
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
@@ -545,12 +568,119 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, ["mbed"])
|
||||
|
||||
def process_extra_options(self):
|
||||
self._process_mbed_lib_confs()
|
||||
return super(MbedLibBuilder, self).process_extra_options()
|
||||
|
||||
def _process_mbed_lib_confs(self):
|
||||
mbed_lib_paths = [
|
||||
join(root, "mbed_lib.json")
|
||||
for root, _, files in os.walk(self.path)
|
||||
if "mbed_lib.json" in files
|
||||
]
|
||||
if not mbed_lib_paths:
|
||||
return None
|
||||
|
||||
mbed_config_path = None
|
||||
for p in self.env.get("CPPPATH"):
|
||||
mbed_config_path = join(self.env.subst(p), "mbed_config.h")
|
||||
if isfile(mbed_config_path):
|
||||
break
|
||||
else:
|
||||
mbed_config_path = None
|
||||
if not mbed_config_path:
|
||||
return None
|
||||
|
||||
macros = {}
|
||||
for mbed_lib_path in mbed_lib_paths:
|
||||
macros.update(self._mbed_lib_conf_parse_macros(mbed_lib_path))
|
||||
|
||||
self._mbed_conf_append_macros(mbed_config_path, macros)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _mbed_normalize_macro(macro):
|
||||
name = macro
|
||||
value = None
|
||||
if "=" in macro:
|
||||
name, value = macro.split("=", 1)
|
||||
return dict(name=name, value=value)
|
||||
|
||||
def _mbed_lib_conf_parse_macros(self, mbed_lib_path):
|
||||
macros = {}
|
||||
cppdefines = str(self.env.Flatten(self.env.subst("$CPPDEFINES")))
|
||||
manifest = fs.load_json(mbed_lib_path)
|
||||
|
||||
# default macros
|
||||
for macro in manifest.get("macros", []):
|
||||
macro = self._mbed_normalize_macro(macro)
|
||||
macros[macro['name']] = macro
|
||||
|
||||
# configuration items
|
||||
for key, options in manifest.get("config", {}).items():
|
||||
if "value" not in options:
|
||||
continue
|
||||
macros[key] = dict(name=options.get("macro_name"),
|
||||
value=options.get("value"))
|
||||
|
||||
# overrode items per target
|
||||
for target, options in manifest.get("target_overrides", {}).items():
|
||||
if target != "*" and "TARGET_" + target not in cppdefines:
|
||||
continue
|
||||
for macro in options.get("target.macros_add", []):
|
||||
macro = self._mbed_normalize_macro(macro)
|
||||
macros[macro['name']] = macro
|
||||
for key, value in options.items():
|
||||
if not key.startswith("target.") and key in macros:
|
||||
macros[key]['value'] = value
|
||||
|
||||
# normalize macro names
|
||||
for key, macro in macros.items():
|
||||
if not macro['name']:
|
||||
macro['name'] = key
|
||||
if "." not in macro['name']:
|
||||
macro['name'] = "%s.%s" % (manifest.get("name"),
|
||||
macro['name'])
|
||||
macro['name'] = re.sub(r"[^a-z\d]+",
|
||||
"_",
|
||||
macro['name'],
|
||||
flags=re.I).upper()
|
||||
macro['name'] = "MBED_CONF_" + macro['name']
|
||||
if isinstance(macro['value'], bool):
|
||||
macro['value'] = 1 if macro['value'] else 0
|
||||
|
||||
return {macro["name"]: macro["value"] for macro in macros.values()}
|
||||
|
||||
def _mbed_conf_append_macros(self, mbed_config_path, macros):
|
||||
lines = []
|
||||
with open(mbed_config_path) as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if line == "#endif":
|
||||
lines.append(
|
||||
"// PlatformIO Library Dependency Finder (LDF)")
|
||||
lines.extend([
|
||||
"#define %s %s" %
|
||||
(name, value if value is not None else "")
|
||||
for name, value in macros.items()
|
||||
])
|
||||
lines.append("")
|
||||
if not line.startswith("#define"):
|
||||
lines.append(line)
|
||||
continue
|
||||
tokens = line.split()
|
||||
if len(tokens) < 2 or tokens[1] not in macros:
|
||||
lines.append(line)
|
||||
lines.append("")
|
||||
with open(mbed_config_path, "w") as fp:
|
||||
fp.write("\n".join(lines))
|
||||
|
||||
|
||||
class PlatformIOLibBuilder(LibBuilderBase):
|
||||
|
||||
def load_manifest(self):
|
||||
assert isfile(join(self.path, "library.json"))
|
||||
manifest = util.load_json(join(self.path, "library.json"))
|
||||
manifest = fs.load_json(join(self.path, "library.json"))
|
||||
assert "name" in manifest
|
||||
|
||||
# replace "espressif" old name dev/platform with ESP8266
|
||||
@@ -565,22 +695,27 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
def _is_arduino_manifest(self):
|
||||
return isfile(join(self.path, "library.properties"))
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if "includeDir" in self._manifest.get("build", {}):
|
||||
with fs.cd(self.path):
|
||||
return realpath(self._manifest.get("build").get("includeDir"))
|
||||
return LibBuilderBase.include_dir.fget(self)
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if all([
|
||||
"srcFilter" in self._manifest.get("build", {})
|
||||
or self.env['SRC_FILTER'], not self._is_arduino_manifest()
|
||||
]):
|
||||
return self.path
|
||||
if "srcDir" in self._manifest.get("build", {}):
|
||||
with fs.cd(self.path):
|
||||
return realpath(self._manifest.get("build").get("srcDir"))
|
||||
return LibBuilderBase.src_dir.fget(self)
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
if "srcFilter" in self._manifest.get("build", {}):
|
||||
return self._manifest.get("build").get("srcFilter")
|
||||
elif self.env['SRC_FILTER']:
|
||||
if self.env['SRC_FILTER']:
|
||||
return self.env['SRC_FILTER']
|
||||
elif self._is_arduino_manifest():
|
||||
if self._is_arduino_manifest():
|
||||
return ArduinoLibBuilder.src_filter.fget(self)
|
||||
return LibBuilderBase.src_filter.fget(self)
|
||||
|
||||
@@ -604,23 +739,28 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
|
||||
@property
|
||||
def lib_archive(self):
|
||||
if "libArchive" in self._manifest.get("build", {}):
|
||||
return self._manifest.get("build").get("libArchive")
|
||||
return LibBuilderBase.lib_archive.fget(self)
|
||||
global_value = self.env.GetProjectOption("lib_archive")
|
||||
if global_value is not None:
|
||||
return global_value
|
||||
return self._manifest.get("build", {}).get(
|
||||
"libArchive", LibBuilderBase.lib_archive.fget(self))
|
||||
|
||||
@property
|
||||
def lib_ldf_mode(self):
|
||||
if "libLDFMode" in self._manifest.get("build", {}):
|
||||
return self.validate_ldf_mode(
|
||||
self._manifest.get("build").get("libLDFMode"))
|
||||
return LibBuilderBase.lib_ldf_mode.fget(self)
|
||||
return self.validate_ldf_mode(
|
||||
self.env.GetProjectOption(
|
||||
"lib_ldf_mode",
|
||||
self._manifest.get("build", {}).get(
|
||||
"libLDFMode", LibBuilderBase.lib_ldf_mode.fget(self))))
|
||||
|
||||
@property
|
||||
def lib_compat_mode(self):
|
||||
if "libCompatMode" in self._manifest.get("build", {}):
|
||||
return self.validate_compat_mode(
|
||||
self._manifest.get("build").get("libCompatMode"))
|
||||
return LibBuilderBase.lib_compat_mode.fget(self)
|
||||
return self.validate_ldf_mode(
|
||||
self.env.GetProjectOption(
|
||||
"lib_compat_mode",
|
||||
self._manifest.get("build", {}).get(
|
||||
"libCompatMode",
|
||||
LibBuilderBase.lib_compat_mode.fget(self))))
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
items = self._manifest.get("platforms")
|
||||
@@ -646,11 +786,18 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in self.envorigin.get("CPPPATH", []):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
|
||||
return include_dirs
|
||||
|
||||
|
||||
class ProjectAsLibBuilder(LibBuilderBase):
|
||||
|
||||
def __init__(self, env, *args, **kwargs):
|
||||
# backup original value, will be reset in base.__init__
|
||||
project_src_filter = env.get("SRC_FILTER")
|
||||
super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs)
|
||||
self.env['SRC_FILTER'] = project_src_filter
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
|
||||
@@ -661,10 +808,13 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
return self.env.subst("$PROJECTSRC_DIR")
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
include_dirs = []
|
||||
project_include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
|
||||
if isdir(project_include_dir):
|
||||
include_dirs.append(project_include_dir)
|
||||
for include_dir in LibBuilderBase.get_include_dirs(self):
|
||||
if include_dir not in include_dirs:
|
||||
include_dirs.append(include_dir)
|
||||
return include_dirs
|
||||
|
||||
def get_search_files(self):
|
||||
@@ -689,123 +839,173 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
return self.env.get("SRC_FILTER", LibBuilderBase.src_filter.fget(self))
|
||||
return (self.env.get("SRC_FILTER")
|
||||
or LibBuilderBase.src_filter.fget(self))
|
||||
|
||||
@property
|
||||
def dependencies(self):
|
||||
return self.env.GetProjectOption("lib_deps", [])
|
||||
|
||||
def process_extra_options(self):
|
||||
# skip for project, options are already processed
|
||||
pass
|
||||
|
||||
def process_dependencies(self): # pylint: disable=too-many-branches
|
||||
uris = self.env.get("LIB_DEPS", [])
|
||||
if not uris:
|
||||
return
|
||||
storage_dirs = []
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if dirname(lb.path) not in storage_dirs:
|
||||
storage_dirs.append(dirname(lb.path))
|
||||
def install_dependencies(self):
|
||||
|
||||
def _is_builtin(uri):
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if lb.name == uri:
|
||||
return True
|
||||
return False
|
||||
|
||||
not_found_uri = []
|
||||
for uri in self.dependencies:
|
||||
# check if built-in library
|
||||
if _is_builtin(uri):
|
||||
continue
|
||||
|
||||
for uri in uris:
|
||||
found = False
|
||||
for storage_dir in storage_dirs:
|
||||
for storage_dir in self.env.GetLibSourceDirs():
|
||||
lm = LibraryManager(storage_dir)
|
||||
if lm.get_package_dir(*lm.parse_pkg_uri(uri)):
|
||||
found = True
|
||||
break
|
||||
if not found:
|
||||
not_found_uri.append(uri)
|
||||
|
||||
did_install = False
|
||||
lm = LibraryManager(
|
||||
self.env.subst(join("$PROJECTLIBDEPS_DIR", "$PIOENV")))
|
||||
for uri in not_found_uri:
|
||||
try:
|
||||
lm.install(uri)
|
||||
did_install = True
|
||||
except (exception.LibNotFound, exception.InternetIsOffline) as e:
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
|
||||
# reset cache
|
||||
if did_install:
|
||||
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=None)
|
||||
|
||||
def process_dependencies(self): # pylint: disable=too-many-branches
|
||||
for uri in self.dependencies:
|
||||
found = False
|
||||
for storage_dir in self.env.GetLibSourceDirs():
|
||||
if found:
|
||||
break
|
||||
lm = LibraryManager(storage_dir)
|
||||
pkg_dir = lm.get_package_dir(*lm.parse_pkg_uri(uri))
|
||||
if not pkg_dir:
|
||||
lib_dir = lm.get_package_dir(*lm.parse_pkg_uri(uri))
|
||||
if not lib_dir:
|
||||
continue
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if lb.path != pkg_dir:
|
||||
if lib_dir not in lb:
|
||||
continue
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
continue
|
||||
|
||||
if not found:
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if lb.name != uri:
|
||||
continue
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
break
|
||||
# look for built-in libraries by a name
|
||||
# which don't have package manifest
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if lb.name != uri:
|
||||
continue
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
found = True
|
||||
break
|
||||
|
||||
def build(self):
|
||||
self._is_built = True # do not build Project now
|
||||
self.env.AppendUnique(CPPPATH=self.get_include_dirs())
|
||||
return LibBuilderBase.build(self)
|
||||
result = LibBuilderBase.build(self)
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
return result
|
||||
|
||||
|
||||
def GetLibSourceDirs(env):
|
||||
items = env.GetProjectOption("lib_extra_dirs", [])
|
||||
items.extend(env['LIBSOURCE_DIRS'])
|
||||
return [
|
||||
env.subst(expanduser(item) if item.startswith("~") else item)
|
||||
for item in items
|
||||
]
|
||||
|
||||
|
||||
def IsCompatibleLibBuilder(env,
|
||||
lb,
|
||||
verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
|
||||
compat_mode = lb.lib_compat_mode
|
||||
if lb.name in env.GetProjectOption("lib_ignore", []):
|
||||
if verbose:
|
||||
sys.stderr.write("Ignored library %s\n" % lb.path)
|
||||
return None
|
||||
if compat_mode == "strict" and not lb.is_platforms_compatible(
|
||||
env['PIOPLATFORM']):
|
||||
if verbose:
|
||||
sys.stderr.write("Platform incompatible library %s\n" % lb.path)
|
||||
return False
|
||||
if (compat_mode in ("soft", "strict") and "PIOFRAMEWORK" in env
|
||||
and not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", []))):
|
||||
if verbose:
|
||||
sys.stderr.write("Framework incompatible library %s\n" % lb.path)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
if DefaultEnvironment().get("__PIO_LIB_BUILDERS", None) is not None:
|
||||
return sorted(DefaultEnvironment()['__PIO_LIB_BUILDERS'],
|
||||
key=lambda lb: 0 if lb.dependent else 1)
|
||||
|
||||
if "__PIO_LIB_BUILDERS" in DefaultEnvironment():
|
||||
return sorted(
|
||||
DefaultEnvironment()['__PIO_LIB_BUILDERS'],
|
||||
key=lambda lb: 0 if lb.dependent else 1)
|
||||
|
||||
items = []
|
||||
verbose = int(ARGUMENTS.get("PIOVERBOSE",
|
||||
0)) and not env.GetOption('clean')
|
||||
|
||||
def _check_lib_builder(lb):
|
||||
compat_mode = lb.lib_compat_mode
|
||||
if lb.name in env.get("LIB_IGNORE", []):
|
||||
if verbose:
|
||||
sys.stderr.write("Ignored library %s\n" % lb.path)
|
||||
return None
|
||||
if compat_mode == "strict" and not lb.is_platforms_compatible(
|
||||
env['PIOPLATFORM']):
|
||||
if verbose:
|
||||
sys.stderr.write(
|
||||
"Platform incompatible library %s\n" % lb.path)
|
||||
return False
|
||||
if compat_mode == "light" and "PIOFRAMEWORK" in env and \
|
||||
not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", [])):
|
||||
if verbose:
|
||||
sys.stderr.write(
|
||||
"Framework incompatible library %s\n" % lb.path)
|
||||
return False
|
||||
return True
|
||||
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=[])
|
||||
|
||||
verbose = int(ARGUMENTS.get("PIOVERBOSE", 0))
|
||||
found_incompat = False
|
||||
for libs_dir in env['LIBSOURCE_DIRS']:
|
||||
libs_dir = env.subst(libs_dir)
|
||||
if not isdir(libs_dir):
|
||||
|
||||
for storage_dir in env.GetLibSourceDirs():
|
||||
storage_dir = realpath(storage_dir)
|
||||
if not isdir(storage_dir):
|
||||
continue
|
||||
for item in sorted(os.listdir(libs_dir)):
|
||||
if item == "__cores__" or not isdir(join(libs_dir, item)):
|
||||
for item in sorted(os.listdir(storage_dir)):
|
||||
lib_dir = join(storage_dir, item)
|
||||
if item == "__cores__" or not isdir(lib_dir):
|
||||
continue
|
||||
try:
|
||||
lb = LibBuilderFactory.new(
|
||||
env, join(libs_dir, item), verbose=verbose)
|
||||
except ValueError:
|
||||
lb = LibBuilderFactory.new(env, lib_dir)
|
||||
except exception.InvalidJSONFile:
|
||||
if verbose:
|
||||
sys.stderr.write("Skip library with broken manifest: %s\n"
|
||||
% join(libs_dir, item))
|
||||
sys.stderr.write(
|
||||
"Skip library with broken manifest: %s\n" % lib_dir)
|
||||
continue
|
||||
if _check_lib_builder(lb):
|
||||
items.append(lb)
|
||||
if env.IsCompatibleLibBuilder(lb):
|
||||
DefaultEnvironment().Append(__PIO_LIB_BUILDERS=[lb])
|
||||
else:
|
||||
found_incompat = True
|
||||
|
||||
for lb in env.get("EXTRA_LIB_BUILDERS", []):
|
||||
if _check_lib_builder(lb):
|
||||
items.append(lb)
|
||||
if env.IsCompatibleLibBuilder(lb):
|
||||
DefaultEnvironment().Append(__PIO_LIB_BUILDERS=[lb])
|
||||
else:
|
||||
found_incompat = True
|
||||
|
||||
if verbose and found_incompat:
|
||||
sys.stderr.write(
|
||||
"More details about \"Library Compatibility Mode\": "
|
||||
"http://docs.platformio.org/page/librarymanager/ldf.html#"
|
||||
"https://docs.platformio.org/page/librarymanager/ldf.html#"
|
||||
"ldf-compat-mode\n")
|
||||
|
||||
DefaultEnvironment()['__PIO_LIB_BUILDERS'] = items
|
||||
return items
|
||||
return DefaultEnvironment()['__PIO_LIB_BUILDERS']
|
||||
|
||||
|
||||
def BuildProjectLibraries(env):
|
||||
def ConfigureProjectLibBuilder(env):
|
||||
|
||||
def correct_found_libs(lib_builders):
|
||||
def _get_vcs_info(lb):
|
||||
path = LibraryManager.get_src_manifest_path(lb.path)
|
||||
return fs.load_json(path) if path else None
|
||||
|
||||
def _correct_found_libs(lib_builders):
|
||||
# build full dependency graph
|
||||
found_lbs = [lb for lb in lib_builders if lb.dependent]
|
||||
for lb in lib_builders:
|
||||
@@ -816,13 +1016,13 @@ def BuildProjectLibraries(env):
|
||||
if deplb not in found_lbs:
|
||||
lb.depbuilders.remove(deplb)
|
||||
|
||||
def print_deps_tree(root, level=0):
|
||||
def _print_deps_tree(root, level=0):
|
||||
margin = "| " * (level)
|
||||
for lb in root.depbuilders:
|
||||
title = "<%s>" % lb.name
|
||||
vcs_info = lb.vcs_info
|
||||
vcs_info = _get_vcs_info(lb)
|
||||
if lb.version:
|
||||
title += " v%s" % lb.version
|
||||
title += " %s" % lb.version
|
||||
if vcs_info and vcs_info.get("version"):
|
||||
title += " #%s" % vcs_info.get("version")
|
||||
sys.stdout.write("%s|-- %s" % (margin, title))
|
||||
@@ -834,32 +1034,33 @@ def BuildProjectLibraries(env):
|
||||
sys.stdout.write(")")
|
||||
sys.stdout.write("\n")
|
||||
if lb.depbuilders:
|
||||
print_deps_tree(lb, level + 1)
|
||||
_print_deps_tree(lb, level + 1)
|
||||
|
||||
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
|
||||
project.env = env
|
||||
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
|
||||
|
||||
print "Library Dependency Finder -> http://bit.ly/configure-pio-ldf"
|
||||
print "LDF MODES: FINDER(%s) COMPATIBILITY(%s)" % (ldf_mode,
|
||||
project.lib_compat_mode)
|
||||
print("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
|
||||
print("LDF Modes: Finder ~ %s, Compatibility ~ %s" %
|
||||
(ldf_mode, project.lib_compat_mode))
|
||||
|
||||
project.install_dependencies()
|
||||
|
||||
lib_builders = env.GetLibBuilders()
|
||||
print "Collected %d compatible libraries" % len(lib_builders)
|
||||
print("Found %d compatible libraries" % len(lib_builders))
|
||||
|
||||
print "Scanning dependencies..."
|
||||
print("Scanning dependencies...")
|
||||
project.search_deps_recursive()
|
||||
|
||||
if ldf_mode.startswith("chain") and project.depbuilders:
|
||||
correct_found_libs(lib_builders)
|
||||
_correct_found_libs(lib_builders)
|
||||
|
||||
if project.depbuilders:
|
||||
print "Dependency Graph"
|
||||
print_deps_tree(project)
|
||||
print("Dependency Graph")
|
||||
_print_deps_tree(project)
|
||||
else:
|
||||
print "No dependencies"
|
||||
print("No dependencies")
|
||||
|
||||
return project.build()
|
||||
return project
|
||||
|
||||
|
||||
def exists(_):
|
||||
@@ -867,6 +1068,8 @@ def exists(_):
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetLibSourceDirs)
|
||||
env.AddMethod(IsCompatibleLibBuilder)
|
||||
env.AddMethod(GetLibBuilders)
|
||||
env.AddMethod(BuildProjectLibraries)
|
||||
env.AddMethod(ConfigureProjectLibBuilder)
|
||||
return env
|
||||
|
||||
@@ -18,24 +18,27 @@ import atexit
|
||||
import re
|
||||
import sys
|
||||
from os import environ, remove, walk
|
||||
from os.path import basename, isdir, isfile, join, relpath, sep
|
||||
from os.path import basename, isdir, isfile, join, realpath, relpath, sep
|
||||
from tempfile import mkstemp
|
||||
|
||||
from SCons.Action import Action
|
||||
from SCons.Script import ARGUMENTS
|
||||
from SCons.Action import Action # pylint: disable=import-error
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
|
||||
from platformio import util
|
||||
from platformio import fs, util
|
||||
from platformio.compat import get_file_contents, glob_escape
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
from platformio.proc import exec_command
|
||||
|
||||
|
||||
class InoToCPPConverter(object):
|
||||
|
||||
PROTOTYPE_RE = re.compile(r"""^(
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
([a-z_\d]+\*?\s+){1,2} # return type
|
||||
([a-z_\d\&]+\*?\s+){1,2} # return type
|
||||
([a-z_\d]+\s*) # name of prototype
|
||||
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
|
||||
)\s*\{ # must end with {
|
||||
)\s*(\{|;) # must end with `{` or `;`
|
||||
""", re.X | re.M | re.I)
|
||||
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
|
||||
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
|
||||
@@ -57,7 +60,7 @@ class InoToCPPConverter(object):
|
||||
assert nodes
|
||||
lines = []
|
||||
for node in nodes:
|
||||
contents = node.get_text_contents()
|
||||
contents = get_file_contents(node.get_path())
|
||||
_lines = [
|
||||
'# 1 "%s"' % node.get_path().replace("\\", "/"), contents
|
||||
]
|
||||
@@ -75,8 +78,7 @@ class InoToCPPConverter(object):
|
||||
def process(self, contents):
|
||||
out_file = self._main_ino + ".cpp"
|
||||
assert self._gcc_preprocess(contents, out_file)
|
||||
with open(out_file) as fp:
|
||||
contents = fp.read()
|
||||
contents = get_file_contents(out_file)
|
||||
contents = self._join_multiline_strings(contents)
|
||||
with open(out_file, "w") as fp:
|
||||
fp.write(self.append_prototypes(contents))
|
||||
@@ -89,8 +91,8 @@ class InoToCPPConverter(object):
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
|
||||
out_file,
|
||||
tmp_path), "Converting " + basename(out_file[:-4])))
|
||||
out_file, tmp_path),
|
||||
"Converting " + basename(out_file[:-4])))
|
||||
atexit.register(_delete_file, tmp_path)
|
||||
return isfile(out_file)
|
||||
|
||||
@@ -157,32 +159,39 @@ class InoToCPPConverter(object):
|
||||
return total
|
||||
|
||||
def append_prototypes(self, contents):
|
||||
prototypes = self._parse_prototypes(contents)
|
||||
prototypes = self._parse_prototypes(contents) or []
|
||||
|
||||
# skip already declared prototypes
|
||||
declared = set(
|
||||
m.group(1).strip() for m in prototypes if m.group(4) == ";")
|
||||
prototypes = [
|
||||
m for m in prototypes if m.group(1).strip() not in declared
|
||||
]
|
||||
|
||||
if not prototypes:
|
||||
return contents
|
||||
|
||||
prototype_names = set([m.group(3).strip() for m in prototypes])
|
||||
prototype_names = set(m.group(3).strip() for m in prototypes)
|
||||
split_pos = prototypes[0].start()
|
||||
match_ptrs = re.search(self.PROTOPTRS_TPLRE %
|
||||
("|".join(prototype_names)),
|
||||
contents[:split_pos], re.M)
|
||||
match_ptrs = re.search(
|
||||
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
|
||||
contents[:split_pos], re.M)
|
||||
if match_ptrs:
|
||||
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
|
||||
|
||||
result = []
|
||||
result.append(contents[:split_pos].strip())
|
||||
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
|
||||
result.append('#line %d "%s"' %
|
||||
(self._get_total_lines(contents[:split_pos]),
|
||||
self._main_ino.replace("\\", "/")))
|
||||
result.append('#line %d "%s"' % (self._get_total_lines(
|
||||
contents[:split_pos]), self._main_ino.replace("\\", "/")))
|
||||
result.append(contents[split_pos:].strip())
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def ConvertInoToCpp(env):
|
||||
src_dir = util.glob_escape(env.subst("$PROJECTSRC_DIR"))
|
||||
ino_nodes = (
|
||||
env.Glob(join(src_dir, "*.ino")) + env.Glob(join(src_dir, "*.pde")))
|
||||
src_dir = glob_escape(env.subst("$PROJECTSRC_DIR"))
|
||||
ino_nodes = (env.Glob(join(src_dir, "*.ino")) +
|
||||
env.Glob(join(src_dir, "*.pde")))
|
||||
if not ino_nodes:
|
||||
return
|
||||
c = InoToCPPConverter(env)
|
||||
@@ -199,12 +208,14 @@ def _delete_file(path):
|
||||
pass
|
||||
|
||||
|
||||
@util.memoized
|
||||
@util.memoized()
|
||||
def _get_compiler_type(env):
|
||||
if env.subst("$CC").endswith("-gcc"):
|
||||
return "gcc"
|
||||
try:
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command([env.subst("$CC"), "-v"], env=sysenv)
|
||||
result = exec_command([env.subst("$CC"), "-v"], env=sysenv)
|
||||
except OSError:
|
||||
return None
|
||||
if result['returncode'] != 0:
|
||||
@@ -212,7 +223,7 @@ def _get_compiler_type(env):
|
||||
output = "".join([result['out'], result['err']]).lower()
|
||||
if "clang" in output and "LLVM" in output:
|
||||
return "clang"
|
||||
elif "gcc" in output:
|
||||
if "gcc" in output:
|
||||
return "gcc"
|
||||
return None
|
||||
|
||||
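The compiler-type probe above shells out to `$CC -v` and greps the version banner. A standalone approximation using plain subprocess is shown below; it collapses the clang/LLVM check into a single substring test, so treat it as a sketch rather than the exact detection rule.

```python
import subprocess

def detect_compiler_type(cc="cc"):
    # Cross-toolchains are usually named like xtensa-esp32-elf-gcc.
    if cc.endswith("-gcc"):
        return "gcc"
    try:
        result = subprocess.run([cc, "-v"], capture_output=True, text=True)
    except OSError:
        return None
    if result.returncode != 0:
        return None
    output = (result.stdout + result.stderr).lower()
    if "clang" in output:
        return "clang"
    if "gcc" in output:
        return "gcc"
    return None

print(detect_compiler_type())
```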
@@ -231,14 +242,25 @@ def GetActualLDScript(env):
|
||||
return None
|
||||
|
||||
script = None
|
||||
script_in_next = False
|
||||
for f in env.get("LINKFLAGS", []):
|
||||
if f.startswith("-Wl,-T"):
|
||||
script = env.subst(f[6:].replace('"', "").strip())
|
||||
if isfile(script):
|
||||
return script
|
||||
path = _lookup_in_ldpath(script)
|
||||
if path:
|
||||
return path
|
||||
raw_script = None
|
||||
if f == "-T":
|
||||
script_in_next = True
|
||||
continue
|
||||
elif script_in_next:
|
||||
script_in_next = False
|
||||
raw_script = f
|
||||
elif f.startswith("-Wl,-T"):
|
||||
raw_script = f[6:]
|
||||
else:
|
||||
continue
|
||||
script = env.subst(raw_script.replace('"', "").strip())
|
||||
if isfile(script):
|
||||
return script
|
||||
path = _lookup_in_ldpath(script)
|
||||
if path:
|
||||
return path
|
||||
|
||||
if script:
|
||||
sys.stderr.write(
|
||||
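The rewritten loop accepts the linker script in two spellings: a standalone `-T` followed by the path in the next flag, and the combined `-Wl,-T<path>` form. A condensed restatement of that flag walk, outside of SCons:

```python
def find_ldscript(linkflags):
    script_in_next = False
    for flag in linkflags:
        if flag == "-T":
            script_in_next = True          # path arrives in the next flag
            continue
        if script_in_next:
            return flag.replace('"', "").strip()
        if flag.startswith("-Wl,-T"):      # combined form, path starts at index 6
            return flag[6:].replace('"', "").strip()
    return None

print(find_ldscript(["-Os", "-T", "flash.ld"]))               # flash.ld
print(find_ldscript(["-Wl,--gc-sections", "-Wl,-Tram.ld"]))   # ram.ld
```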
@@ -263,57 +285,56 @@ def VerboseAction(_, act, actstr):
|
||||
|
||||
def PioClean(env, clean_dir):
|
||||
if not isdir(clean_dir):
|
||||
print "Build environment is clean"
|
||||
print("Build environment is clean")
|
||||
env.Exit(0)
|
||||
clean_rel_path = relpath(clean_dir)
|
||||
for root, _, files in walk(clean_dir):
|
||||
for file_ in files:
|
||||
remove(join(root, file_))
|
||||
print "Removed %s" % relpath(join(root, file_))
|
||||
print "Done cleaning"
|
||||
util.rmtree_(clean_dir)
|
||||
for f in files:
|
||||
dst = join(root, f)
|
||||
remove(dst)
|
||||
print("Removed %s" %
|
||||
(dst if clean_rel_path.startswith(".") else relpath(dst)))
|
||||
print("Done cleaning")
|
||||
fs.rmtree(clean_dir)
|
||||
env.Exit(0)
|
||||
|
||||
|
||||
def ProcessDebug(env):
|
||||
if not env.subst("$PIODEBUGFLAGS"):
|
||||
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"])
|
||||
env.Append(
|
||||
BUILD_FLAGS=env.get("PIODEBUGFLAGS", []),
|
||||
BUILD_UNFLAGS=["-Os", "-O0", "-O1", "-O2", "-O3"])
|
||||
env.Append(BUILD_FLAGS=list(env['PIODEBUGFLAGS']) +
|
||||
["-D__PLATFORMIO_BUILD_DEBUG__"])
|
||||
unflags = ["-Os"]
|
||||
for level in [0, 1, 2]:
|
||||
for flag in ("O", "g", "ggdb"):
|
||||
unflags.append("-%s%d" % (flag, level))
|
||||
env.Append(BUILD_UNFLAGS=unflags)
|
||||
|
||||
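The nested loop in the new ProcessDebug expands to a short, fixed list of optimization and debug flags to strip before the debug flags are applied; running it in isolation makes the result explicit:

```python
unflags = ["-Os"]
for level in [0, 1, 2]:
    for flag in ("O", "g", "ggdb"):
        unflags.append("-%s%d" % (flag, level))
print(unflags)
# ['-Os', '-O0', '-g0', '-ggdb0', '-O1', '-g1', '-ggdb1', '-O2', '-g2', '-ggdb2']
```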
|
||||
def ProcessTest(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
|
||||
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")])
|
||||
unitylib = env.BuildLibrary(
|
||||
join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity"))
|
||||
env.Append(CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
|
||||
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")])
|
||||
unitylib = env.BuildLibrary(join("$BUILD_DIR", "UnityTestLib"),
|
||||
get_core_package_dir("tool-unity"))
|
||||
env.Prepend(LIBS=[unitylib])
|
||||
|
||||
src_filter = ["+<*.cpp>", "+<*.c>"]
|
||||
if "PIOTEST" in env:
|
||||
src_filter.append("+<%s%s>" % (env['PIOTEST'], sep))
|
||||
if "PIOTEST_RUNNING_NAME" in env:
|
||||
src_filter.append("+<%s%s>" % (env['PIOTEST_RUNNING_NAME'], sep))
|
||||
env.Replace(PIOTEST_SRC_FILTER=src_filter)
|
||||
|
||||
return env.CollectBuildFiles(
|
||||
"$BUILDTEST_DIR",
|
||||
"$PROJECTTEST_DIR",
|
||||
"$PIOTEST_SRC_FILTER",
|
||||
duplicate=False)
|
||||
|
||||
|
||||
def GetPreExtraScripts(env):
|
||||
return [
|
||||
item[4:] for item in env.get("EXTRA_SCRIPTS", [])
|
||||
if item.startswith("pre:")
|
||||
]
|
||||
|
||||
|
||||
def GetPostExtraScripts(env):
|
||||
return [
|
||||
item[5:] if item.startswith("post:") else item
|
||||
for item in env.get("EXTRA_SCRIPTS", []) if not item.startswith("pre:")
|
||||
]
|
||||
def GetExtraScripts(env, scope):
|
||||
items = []
|
||||
for item in env.GetProjectOption("extra_scripts", []):
|
||||
if scope == "post" and ":" not in item:
|
||||
items.append(item)
|
||||
elif item.startswith("%s:" % scope):
|
||||
items.append(item[len(scope) + 1:])
|
||||
if not items:
|
||||
return items
|
||||
with fs.cd(env.subst("$PROJECT_DIR")):
|
||||
return [realpath(item) for item in items]
|
||||
|
||||
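GetExtraScripts replaces the separate pre/post helpers with one scope-aware filter: unprefixed entries count as post scripts, prefixed entries go to their named scope with the prefix stripped. A standalone illustration of that filtering rule follows; the sample script names are made up.

```python
def split_extra_scripts(items, scope):
    result = []
    for item in items:
        if scope == "post" and ":" not in item:
            result.append(item)                   # bare entries default to post
        elif item.startswith("%s:" % scope):
            result.append(item[len(scope) + 1:])  # strip "pre:" / "post:"
    return result

scripts = ["pre:apply_patches.py", "post:make_image.py", "extra.py"]
print(split_extra_scripts(scripts, "pre"))   # ['apply_patches.py']
print(split_extra_scripts(scripts, "post"))  # ['make_image.py', 'extra.py']
```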
|
||||
def exists(_):
|
||||
@@ -328,6 +349,5 @@ def generate(env):
|
||||
env.AddMethod(PioClean)
|
||||
env.AddMethod(ProcessDebug)
|
||||
env.AddMethod(ProcessTest)
|
||||
env.AddMethod(GetPreExtraScripts)
|
||||
env.AddMethod(GetPostExtraScripts)
|
||||
env.AddMethod(GetExtraScripts)
|
||||
return env
|
||||
|
||||
@@ -17,29 +17,30 @@ from __future__ import absolute_import
|
||||
import sys
|
||||
from os.path import isdir, isfile, join
|
||||
|
||||
from SCons.Script import COMMAND_LINE_TARGETS
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio import exception, fs, util
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
from platformio.project.config import ProjectOptions
|
||||
|
||||
# pylint: disable=too-many-branches, too-many-locals
|
||||
|
||||
|
||||
@util.memoized
|
||||
def initPioPlatform(name):
|
||||
return PlatformFactory.newPlatform(name)
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def PioPlatform(env):
|
||||
variables = {}
|
||||
for name in env['PIOVARIABLES']:
|
||||
if name in env:
|
||||
variables[name.lower()] = env[name]
|
||||
p = initPioPlatform(env['PLATFORM_MANIFEST'])
|
||||
variables = env.GetProjectOptions(as_dict=True)
|
||||
if "framework" in variables:
|
||||
# support PIO Core 3.0 dev/platforms
|
||||
variables['pioframework'] = variables['framework']
|
||||
p = PlatformFactory.newPlatform(env['PLATFORM_MANIFEST'])
|
||||
p.configure_default_packages(variables, COMMAND_LINE_TARGETS)
|
||||
return p
|
||||
|
||||
|
||||
def BoardConfig(env, board=None):
|
||||
p = initPioPlatform(env['PLATFORM_MANIFEST'])
|
||||
p = env.PioPlatform()
|
||||
try:
|
||||
board = board or env.get("BOARD")
|
||||
assert board, "BoardConfig: Board is not defined"
|
||||
@@ -59,22 +60,29 @@ def GetFrameworkScript(env, framework):
|
||||
return script_path
|
||||
|
||||
|
||||
def LoadPioPlatform(env, variables):
|
||||
def LoadPioPlatform(env):
|
||||
p = env.PioPlatform()
|
||||
installed_packages = p.get_installed_packages()
|
||||
|
||||
# Ensure real platform name
|
||||
env['PIOPLATFORM'] = p.name
|
||||
|
||||
# Add toolchains and uploaders to $PATH
|
||||
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
|
||||
systype = util.get_systype()
|
||||
for name in installed_packages:
|
||||
type_ = p.get_package_type(name)
|
||||
if type_ not in ("toolchain", "uploader"):
|
||||
if type_ not in ("toolchain", "uploader", "debugger"):
|
||||
continue
|
||||
path = p.get_package_dir(name)
|
||||
if isdir(join(path, "bin")):
|
||||
path = join(path, "bin")
|
||||
env.PrependENVPath("PATH", path)
|
||||
pkg_dir = p.get_package_dir(name)
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir)
|
||||
if (not WINDOWS and isdir(join(pkg_dir, "lib"))
|
||||
and type_ != "toolchain"):
|
||||
env.PrependENVPath(
|
||||
"DYLD_LIBRARY_PATH"
|
||||
if "darwin" in systype else "LD_LIBRARY_PATH",
|
||||
join(pkg_dir, "lib"))
|
||||
|
||||
# Platform specific LD Scripts
|
||||
if isdir(join(p.get_dir(), "ldscripts")):
|
||||
@@ -83,69 +91,119 @@ def LoadPioPlatform(env, variables):
|
||||
if "BOARD" not in env:
|
||||
return
|
||||
|
||||
# update board manifest with overridden data from INI config
|
||||
board_config = env.BoardConfig()
|
||||
for k in variables.keys():
|
||||
if k in env or \
|
||||
not any([k.startswith("BOARD_"), k.startswith("UPLOAD_")]):
|
||||
for option, value in env.GetProjectOptions():
|
||||
if option.startswith("board_"):
|
||||
board_config.update(option.lower()[6:], value)
|
||||
|
||||
# load default variables from board config
|
||||
for option_meta in ProjectOptions.values():
|
||||
if not option_meta.buildenvvar or option_meta.buildenvvar in env:
|
||||
continue
|
||||
_opt, _val = k.lower().split("_", 1)
|
||||
if _opt == "board":
|
||||
_opt = "build"
|
||||
if _val in board_config.get(_opt):
|
||||
env.Replace(**{k: board_config.get("%s.%s" % (_opt, _val))})
|
||||
data_path = (option_meta.name[6:]
|
||||
if option_meta.name.startswith("board_") else
|
||||
option_meta.name.replace("_", "."))
|
||||
try:
|
||||
env[option_meta.buildenvvar] = board_config.get(data_path)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
if "build.ldscript" in board_config:
|
||||
env.Replace(LDSCRIPT_PATH=board_config.get("build.ldscript"))
|
||||
|
||||
|
||||
def PrintConfiguration(env): # pylint: disable=too-many-branches
|
||||
platform_data = ["PLATFORM: %s >" % env.PioPlatform().title]
|
||||
system_data = ["SYSTEM:"]
|
||||
mcu = env.subst("$BOARD_MCU")
|
||||
f_cpu = env.subst("$BOARD_F_CPU")
|
||||
if mcu:
|
||||
system_data.append(mcu.upper())
|
||||
if f_cpu:
|
||||
f_cpu = int("".join([c for c in str(f_cpu) if c.isdigit()]))
|
||||
system_data.append("%dMHz" % (f_cpu / 1000000))
|
||||
def PrintConfiguration(env): # pylint: disable=too-many-statements
|
||||
platform = env.PioPlatform()
|
||||
board_config = env.BoardConfig() if "BOARD" in env else None
|
||||
|
||||
debug_tools = None
|
||||
if "BOARD" in env:
|
||||
board_config = env.BoardConfig()
|
||||
platform_data.append(board_config.get("name"))
|
||||
def _get_configuration_data():
|
||||
return None if not board_config else [
|
||||
"CONFIGURATION:",
|
||||
"https://docs.platformio.org/page/boards/%s/%s.html" %
|
||||
(platform.name, board_config.id)
|
||||
]
|
||||
|
||||
debug_tools = board_config.get("debug", {}).get("tools")
|
||||
def _get_plaform_data():
|
||||
data = ["PLATFORM: %s %s" % (platform.title, platform.version)]
|
||||
src_manifest_path = platform.pm.get_src_manifest_path(
|
||||
platform.get_dir())
|
||||
if src_manifest_path:
|
||||
src_manifest = fs.load_json(src_manifest_path)
|
||||
if "version" in src_manifest:
|
||||
data.append("#" + src_manifest['version'])
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
data.append("(%s)" % src_manifest['url'])
|
||||
if board_config:
|
||||
data.extend([">", board_config.get("name")])
|
||||
return data
|
||||
|
||||
def _get_hardware_data():
|
||||
data = ["HARDWARE:"]
|
||||
mcu = env.subst("$BOARD_MCU")
|
||||
f_cpu = env.subst("$BOARD_F_CPU")
|
||||
if mcu:
|
||||
data.append(mcu.upper())
|
||||
if f_cpu:
|
||||
f_cpu = int("".join([c for c in str(f_cpu) if c.isdigit()]))
|
||||
data.append("%dMHz," % (f_cpu / 1000000))
|
||||
if not board_config:
|
||||
return data
|
||||
ram = board_config.get("upload", {}).get("maximum_ram_size")
|
||||
flash = board_config.get("upload", {}).get("maximum_size")
|
||||
system_data.append("%s RAM (%s Flash)" % (util.format_filesize(ram),
|
||||
util.format_filesize(flash)))
|
||||
data.append("%s RAM, %s Flash" %
|
||||
(fs.format_filesize(ram), fs.format_filesize(flash)))
|
||||
return data
|
||||
|
||||
if platform_data:
|
||||
print " ".join(platform_data)
|
||||
if system_data:
|
||||
print " ".join(system_data)
|
||||
def _get_debug_data():
|
||||
debug_tools = board_config.get(
|
||||
"debug", {}).get("tools") if board_config else None
|
||||
if not debug_tools:
|
||||
return None
|
||||
data = [
|
||||
"DEBUG:", "Current",
|
||||
"(%s)" % board_config.get_debug_tool_name(
|
||||
env.GetProjectOption("debug_tool"))
|
||||
]
|
||||
onboard = []
|
||||
external = []
|
||||
for key, value in debug_tools.items():
|
||||
if value.get("onboard"):
|
||||
onboard.append(key)
|
||||
else:
|
||||
external.append(key)
|
||||
if onboard:
|
||||
data.extend(["On-board", "(%s)" % ", ".join(sorted(onboard))])
|
||||
if external:
|
||||
data.extend(["External", "(%s)" % ", ".join(sorted(external))])
|
||||
return data
|
||||
|
||||
# Debugging
|
||||
if not debug_tools:
|
||||
return
|
||||
def _get_packages_data():
|
||||
data = []
|
||||
for name, options in platform.packages.items():
|
||||
if options.get("optional"):
|
||||
continue
|
||||
pkg_dir = platform.get_package_dir(name)
|
||||
if not pkg_dir:
|
||||
continue
|
||||
manifest = platform.pm.load_manifest(pkg_dir)
|
||||
original_version = util.get_original_version(manifest['version'])
|
||||
info = "%s %s" % (manifest['name'], manifest['version'])
|
||||
extra = []
|
||||
if original_version:
|
||||
extra.append(original_version)
|
||||
if "__src_url" in manifest and int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
extra.append(manifest['__src_url'])
|
||||
if extra:
|
||||
info += " (%s)" % ", ".join(extra)
|
||||
data.append(info)
|
||||
return ["PACKAGES:", ", ".join(data)]
|
||||
|
||||
data = [
|
||||
"CURRENT(%s)" % board_config.get_debug_tool_name(
|
||||
env.subst("$DEBUG_TOOL"))
|
||||
]
|
||||
onboard = []
|
||||
external = []
|
||||
for key, value in debug_tools.items():
|
||||
if value.get("onboard"):
|
||||
onboard.append(key)
|
||||
else:
|
||||
external.append(key)
|
||||
if onboard:
|
||||
data.append("ON-BOARD(%s)" % ", ".join(sorted(onboard)))
|
||||
if external:
|
||||
data.append("EXTERNAL(%s)" % ", ".join(sorted(external)))
|
||||
|
||||
print "DEBUG: %s" % " ".join(data)
|
||||
for data in (_get_configuration_data(), _get_plaform_data(),
|
||||
_get_hardware_data(), _get_debug_data(),
|
||||
_get_packages_data()):
|
||||
if data and len(data) > 1:
|
||||
print(" ".join(data))
|
||||
|
||||
|
||||
def exists(_):
|
||||
|
||||
49
platformio/builder/tools/pioproject.py
Normal file
@@ -0,0 +1,49 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from platformio.project.config import ProjectConfig, ProjectOptions
|
||||
|
||||
|
||||
def GetProjectConfig(env):
|
||||
return ProjectConfig.get_instance(env['PROJECT_CONFIG'])
|
||||
|
||||
|
||||
def GetProjectOptions(env, as_dict=False):
|
||||
return env.GetProjectConfig().items(env=env['PIOENV'], as_dict=as_dict)
|
||||
|
||||
|
||||
def GetProjectOption(env, option, default=None):
|
||||
return env.GetProjectConfig().get("env:" + env['PIOENV'], option, default)
|
||||
|
||||
|
||||
def LoadProjectOptions(env):
|
||||
for option, value in env.GetProjectOptions():
|
||||
option_meta = ProjectOptions.get("env." + option)
|
||||
if not option_meta or not option_meta.buildenvvar:
|
||||
continue
|
||||
env[option_meta.buildenvvar] = value
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetProjectConfig)
|
||||
env.AddMethod(GetProjectOptions)
|
||||
env.AddMethod(GetProjectOption)
|
||||
env.AddMethod(LoadProjectOptions)
|
||||
return env
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from os import environ
|
||||
@@ -21,10 +22,14 @@ from os.path import isfile, join
|
||||
from shutil import copyfile
|
||||
from time import sleep
|
||||
|
||||
from SCons.Node.Alias import Alias
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import util
|
||||
from platformio import exception, fs, util
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.proc import exec_command
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
|
||||
def FlushSerialBuffer(env, port):
|
||||
@@ -40,18 +45,18 @@ def FlushSerialBuffer(env, port):
|
||||
|
||||
def TouchSerialPort(env, port, baudrate):
|
||||
port = env.subst(port)
|
||||
print "Forcing reset using %dbps open/close on port %s" % (baudrate, port)
|
||||
print("Forcing reset using %dbps open/close on port %s" % (baudrate, port))
|
||||
try:
|
||||
s = Serial(port=port, baudrate=baudrate)
|
||||
s.setDTR(False)
|
||||
s.close()
|
||||
except: # pylint: disable=W0702
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
sleep(0.4) # DO NOT REMOVE THAT (required by SAM-BA based boards)
|
||||
|
||||
|
||||
def WaitForNewSerialPort(env, before):
|
||||
print "Waiting for the new upload port..."
|
||||
print("Waiting for the new upload port...")
|
||||
prev_port = env.subst("$UPLOAD_PORT")
|
||||
new_port = None
|
||||
elapsed = 0
|
||||
@@ -88,7 +93,7 @@ def WaitForNewSerialPort(env, before):
|
||||
return new_port
|
||||
|
||||
|
||||
def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
def AutodetectUploadPort(*args, **kwargs):
|
||||
env = args[0]
|
||||
|
||||
def _get_pattern():
|
||||
@@ -130,10 +135,11 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
if not _is_match_pattern(item['port']):
|
||||
continue
|
||||
port = item['port']
|
||||
if upload_protocol.startswith("blackmagic") \
|
||||
and "GDB" in item['description']:
|
||||
return ("\\\\.\\%s" % port if "windows" in util.get_systype()
|
||||
and port.startswith("COM") and len(port) > 4 else port)
|
||||
if upload_protocol.startswith("blackmagic"):
|
||||
if WINDOWS and port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item['description']:
|
||||
return port
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item['hwid']:
|
||||
@@ -141,7 +147,7 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
return port
|
||||
|
||||
if "UPLOAD_PORT" in env and not _get_pattern():
|
||||
print env.subst("Use manually specified: $UPLOAD_PORT")
|
||||
print(env.subst("Use manually specified: $UPLOAD_PORT"))
|
||||
return
|
||||
|
||||
if (env.subst("$UPLOAD_PROTOCOL") == "mbed"
|
||||
@@ -149,19 +155,14 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
and not env.subst("$UPLOAD_PROTOCOL"))):
|
||||
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
|
||||
else:
|
||||
if ("linux" in util.get_systype() and not any([
|
||||
isfile("/etc/udev/rules.d/99-platformio-udev.rules"),
|
||||
isfile("/lib/udev/rules.d/99-platformio-udev.rules")
|
||||
])):
|
||||
sys.stderr.write(
|
||||
"\nWarning! Please install `99-platformio-udev.rules` and "
|
||||
"check that your board's PID and VID are listed in the rules."
|
||||
"\n http://docs.platformio.org/en/latest/faq.html"
|
||||
"#platformio-udev-rules\n")
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
sys.stderr.write("\n%s\n\n" % e)
|
||||
env.Replace(UPLOAD_PORT=_look_for_serial_port())
|
||||
|
||||
if env.subst("$UPLOAD_PORT"):
|
||||
print env.subst("Auto-detected: $UPLOAD_PORT")
|
||||
print(env.subst("Auto-detected: $UPLOAD_PORT"))
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: Please specify `upload_port` for environment or use "
|
||||
@@ -171,7 +172,7 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
def UploadToDisk(_, target, source, env):
|
||||
assert "UPLOAD_PORT" in env
|
||||
progname = env.subst("$PROGNAME")
|
||||
for ext in ("bin", "hex"):
|
||||
@@ -180,36 +181,90 @@ def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
continue
|
||||
copyfile(fpath,
|
||||
join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
|
||||
print "Firmware has been successfully uploaded.\n"\
|
||||
"(Some boards may require manual hard reset)"
|
||||
print("Firmware has been successfully uploaded.\n"
|
||||
"(Some boards may require manual hard reset)")
|
||||
|
||||
|
||||
def CheckUploadSize(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
if "BOARD" not in env:
|
||||
return
|
||||
max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
|
||||
if max_size == 0 or "SIZETOOL" not in env:
|
||||
return
|
||||
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
cmd = [
|
||||
env.subst("$SIZETOOL"), "-B",
|
||||
str(source[0] if isinstance(target[0], Alias) else target[0])
|
||||
def CheckUploadSize(_, target, source, env):
|
||||
check_conditions = [
|
||||
env.get("BOARD"),
|
||||
env.get("SIZETOOL") or env.get("SIZECHECKCMD")
|
||||
]
|
||||
result = util.exec_command(cmd, env=sysenv)
|
||||
if result['returncode'] != 0:
|
||||
if not all(check_conditions):
|
||||
return
|
||||
program_max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
|
||||
data_max_size = int(env.BoardConfig().get("upload.maximum_ram_size", 0))
|
||||
if program_max_size == 0:
|
||||
return
|
||||
print result['out'].strip()
|
||||
|
||||
line = result['out'].strip().splitlines()[1]
|
||||
values = [v.strip() for v in line.split("\t")]
|
||||
used_size = int(values[0]) + int(values[1])
|
||||
def _configure_defaults():
|
||||
env.Replace(SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
|
||||
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
|
||||
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+")
|
||||
|
||||
if used_size > max_size:
|
||||
def _get_size_output():
|
||||
cmd = env.get("SIZECHECKCMD")
|
||||
if not cmd:
|
||||
return None
|
||||
if not isinstance(cmd, list):
|
||||
cmd = cmd.split()
|
||||
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = exec_command(env.subst(cmd), env=sysenv)
|
||||
if result['returncode'] != 0:
|
||||
return None
|
||||
return result['out'].strip()
|
||||
|
||||
def _calculate_size(output, pattern):
|
||||
if not output or not pattern:
|
||||
return -1
|
||||
size = 0
|
||||
regexp = re.compile(pattern)
|
||||
for line in output.split("\n"):
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
match = regexp.search(line)
|
||||
if not match:
|
||||
continue
|
||||
size += sum(int(value) for value in match.groups())
|
||||
return size
|
||||
|
||||
def _format_availale_bytes(value, total):
|
||||
percent_raw = float(value) / float(total)
|
||||
blocks_per_progress = 10
|
||||
used_blocks = int(round(blocks_per_progress * percent_raw))
|
||||
if used_blocks > blocks_per_progress:
|
||||
used_blocks = blocks_per_progress
|
||||
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
|
||||
"=" * used_blocks, blocks_per_progress, percent_raw, value, total)
|
||||
|
||||
if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
|
||||
_configure_defaults()
|
||||
output = _get_size_output()
|
||||
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
|
||||
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))
|
||||
|
||||
print("Memory Usage -> http://bit.ly/pio-memory-usage")
|
||||
if data_max_size and data_size > -1:
|
||||
print("DATA: %s" % _format_availale_bytes(data_size, data_max_size))
|
||||
if program_size > -1:
|
||||
print("PROGRAM: %s" %
|
||||
_format_availale_bytes(program_size, program_max_size))
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print(output)
|
||||
|
||||
# raise error
|
||||
# if data_max_size and data_size > data_max_size:
|
||||
# sys.stderr.write(
|
||||
# "Error: The data size (%d bytes) is greater "
|
||||
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
|
||||
# env.Exit(1)
|
||||
if program_size > program_max_size:
|
||||
sys.stderr.write("Error: The program size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (used_size,
|
||||
max_size))
|
||||
"than maximum allowed (%s bytes)\n" %
|
||||
(program_size, program_max_size))
|
||||
env.Exit(1)
|
||||
|
||||
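The reworked CheckUploadSize derives program and data usage by summing regex captures from the size tool's Berkeley-format output, then renders a usage bar. Below is a self-contained reproduction of that accounting, using the default SIZEPROGREGEXP/SIZEDATAREGEXP patterns from the diff; the firmware numbers are made up.

```python
import re

SIZE_OUTPUT = """\
   text    data     bss     dec     hex filename
  12345     512    1024   13881    3639 firmware.elf"""

PROG_RE = r"^(\d+)\s+(\d+)\s+\d+\s"       # text + data
DATA_RE = r"^\d+\s+(\d+)\s+(\d+)\s+\d+"   # data + bss

def calculate_size(output, pattern):
    size = 0
    regexp = re.compile(pattern)
    for line in output.split("\n"):
        match = regexp.search(line.strip())
        if match:
            size += sum(int(value) for value in match.groups())
    return size

def format_used(value, total, blocks=10):
    ratio = float(value) / float(total)
    used = min(int(round(blocks * ratio)), blocks)
    return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
        "=" * used, blocks, ratio, value, total)

print("PROGRAM: %s" % format_used(calculate_size(SIZE_OUTPUT, PROG_RE), 65536))
print("DATA:    %s" % format_used(calculate_size(SIZE_OUTPUT, DATA_RE), 8192))
```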
|
||||
@@ -217,12 +272,12 @@ def PrintUploadInfo(env):
|
||||
configured = env.subst("$UPLOAD_PROTOCOL")
|
||||
available = [configured] if configured else []
|
||||
if "BOARD" in env:
|
||||
available.extend(env.BoardConfig().get("upload", {}).get(
|
||||
"protocols", []))
|
||||
available.extend(env.BoardConfig().get("upload",
|
||||
{}).get("protocols", []))
|
||||
if available:
|
||||
print "AVAILABLE: %s" % ", ".join(sorted(available))
|
||||
print("AVAILABLE: %s" % ", ".join(sorted(set(available))))
|
||||
if configured:
|
||||
print "CURRENT: upload_protocol = %s" % configured
|
||||
print("CURRENT: upload_protocol = %s" % configured)
|
||||
|
||||
|
||||
def exists(_):
|
||||
|
||||
@@ -12,10 +12,13 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from hashlib import md5
|
||||
from os import makedirs
|
||||
from os.path import isdir, isfile, join
|
||||
from platform import system
|
||||
|
||||
from platformio.compat import WINDOWS, hashlib_encode_data
|
||||
|
||||
# Windows CLI has limit with command length to 8192
|
||||
# Leave 2000 chars for flags and other options
|
||||
@@ -58,7 +61,8 @@ def _file_long_data(env, data):
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
tmp_file = join(build_dir, "longcmd-%s" % md5(data).hexdigest())
|
||||
tmp_file = join(build_dir,
|
||||
"longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest())
|
||||
if isfile(tmp_file):
|
||||
return tmp_file
|
||||
with open(tmp_file, "w") as fp:
|
||||
@@ -71,7 +75,7 @@ def exists(_):
|
||||
|
||||
|
||||
def generate(env):
|
||||
if system() != "Windows":
|
||||
if not WINDOWS:
|
||||
return None
|
||||
|
||||
env.Replace(_long_sources_hook=long_sources_hook)
|
||||
|
||||
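The long-command workaround above hashes the command data into a stable response-file name, so repeated identical link lines reuse the same file. A tiny sketch of the naming scheme; hashlib_encode_data is approximated here with plain UTF-8 encoding.

```python
from hashlib import md5

def longcmd_filename(data):
    if not isinstance(data, bytes):
        data = data.encode("utf-8")    # stand-in for hashlib_encode_data()
    return "longcmd-%s" % md5(data).hexdigest()

print(longcmd_filename("link.exe /OUT:firmware.elf obj1.o obj2.o ..."))
```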
@@ -14,34 +14,72 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
from glob import glob
|
||||
from os import sep, walk
|
||||
from os.path import basename, dirname, isdir, join, realpath
|
||||
|
||||
from SCons import Action, Builder, Util
|
||||
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild,
|
||||
DefaultEnvironment, SConscript)
|
||||
from SCons import Builder, Util # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Export # pylint: disable=import-error
|
||||
from SCons.Script import SConscript # pylint: disable=import-error
|
||||
|
||||
from platformio.util import glob_escape, pioversion_to_intstr
|
||||
from platformio import fs
|
||||
from platformio.compat import string_types
|
||||
from platformio.util import pioversion_to_intstr
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_C_EXT = ["c", "cc", "cpp"]
|
||||
SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-<svn%s>" % sep]
|
||||
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % os.sep, "-<.svn%s>" % os.sep]
|
||||
|
||||
|
||||
def scons_patched_match_splitext(path, suffixes=None):
|
||||
"""
|
||||
Patch SCons Builder, append $OBJSUFFIX to the end of each target
|
||||
"""
|
||||
"""Patch SCons Builder, append $OBJSUFFIX to the end of each target"""
|
||||
tokens = Util.splitext(path)
|
||||
if suffixes and tokens[1] and tokens[1] in suffixes:
|
||||
return (path, tokens[1])
|
||||
return tokens
|
||||
|
||||
|
||||
def _build_project_deps(env):
|
||||
project_lib_builder = env.ConfigureProjectLibBuilder()
|
||||
|
||||
# prepend project libs to the beginning of list
|
||||
env.Prepend(LIBS=project_lib_builder.build())
|
||||
# prepend extra linker related options from libs
|
||||
env.PrependUnique(
|
||||
**{
|
||||
key: project_lib_builder.env.get(key)
|
||||
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
|
||||
if project_lib_builder.env.get(key)
|
||||
})
|
||||
|
||||
projenv = env.Clone()
|
||||
|
||||
# CPPPATH from dependencies
|
||||
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
|
||||
# extra build flags from `platformio.ini`
|
||||
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
|
||||
is_test = "__test" in COMMAND_LINE_TARGETS
|
||||
if is_test:
|
||||
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
|
||||
"$PIOTEST_SRC_FILTER")
|
||||
if not is_test or env.GetProjectOption("test_build_project_src", False):
|
||||
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
|
||||
env.get("SRC_FILTER"))
|
||||
|
||||
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
|
||||
env.Exit(1)
|
||||
|
||||
Export("projenv")
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
|
||||
def _append_pio_macros():
|
||||
@@ -57,14 +95,14 @@ def BuildProgram(env):
|
||||
if not Util.case_sensitive_suffixes(".s", ".S"):
|
||||
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
|
||||
|
||||
if "__debug" in COMMAND_LINE_TARGETS:
|
||||
if ("debug" in COMMAND_LINE_TARGETS
|
||||
or env.GetProjectOption("build_type") == "debug"):
|
||||
env.ProcessDebug()
|
||||
|
||||
# process extra flags from board
|
||||
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
|
||||
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
|
||||
# remove base flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
# apply user flags
|
||||
env.ProcessFlags(env.get("BUILD_FLAGS"))
|
||||
|
||||
@@ -74,151 +112,127 @@ def BuildProgram(env):
|
||||
# restore PIO macros if it was deleted by framework
|
||||
_append_pio_macros()
|
||||
|
||||
# build dependent libs; place them before built-in libs
|
||||
env.Prepend(LIBS=env.BuildProjectLibraries())
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
# append specified LD_SCRIPT
|
||||
if ("LDSCRIPT_PATH" in env
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessTest()
|
||||
|
||||
# build project with dependencies
|
||||
_build_project_deps(env)
|
||||
|
||||
# prepend the main LD script
|
||||
if (env.get("LDSCRIPT_PATH")
|
||||
and not any("-Wl,-T" in f for f in env['LINKFLAGS'])):
|
||||
env.Append(LINKFLAGS=['-Wl,-T"$LDSCRIPT_PATH"'])
|
||||
env.Prepend(LINKFLAGS=["-T", "$LDSCRIPT_PATH"])
|
||||
|
||||
# enable "cyclic reference" for linker
|
||||
if env.get("LIBS") and env.GetCompilerType() == "gcc":
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
# Handle SRC_BUILD_FLAGS
|
||||
env.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
program = env.Program(join("$BUILD_DIR", env.subst("$PROGNAME")),
|
||||
env['PIOBUILDFILES'])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
env.Append(
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
PIOBUILDFILES=env.CollectBuildFiles(
|
||||
"$BUILDSRC_DIR",
|
||||
"$PROJECTSRC_DIR",
|
||||
src_filter=env.get("SRC_FILTER"),
|
||||
duplicate=False))
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.Append(PIOBUILDFILES=env.ProcessTest())
|
||||
|
||||
if not env['PIOBUILDFILES'] and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
|
||||
env.Exit(1)
|
||||
|
||||
program = env.Program(
|
||||
join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES'])
|
||||
|
||||
checksize_action = Action.Action(env.CheckUploadSize,
|
||||
"Checking program size")
|
||||
AlwaysBuild(env.Alias("checkprogsize", program, checksize_action))
|
||||
if set(["upload", "program"]) & set(COMMAND_LINE_TARGETS):
|
||||
env.AddPostAction(program, checksize_action)
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"checkprogsize", program,
|
||||
env.VerboseAction(env.CheckUploadSize,
|
||||
"Checking size $PIOMAINPROG")))
|
||||
|
||||
return program
|
||||
|
||||
|
||||
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
if not isinstance(flags, list):
|
||||
flags = [flags]
|
||||
result = {}
|
||||
for raw in flags:
|
||||
for key, value in env.ParseFlags(str(raw)).items():
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
result[key].extend(value)
|
||||
|
||||
cppdefines = []
|
||||
for item in result['CPPDEFINES']:
|
||||
if not Util.is_Sequence(item):
|
||||
cppdefines.append(item)
|
||||
continue
|
||||
name, value = item[:2]
|
||||
if '\"' in value:
|
||||
value = value.replace('\"', '\\\"')
|
||||
elif value.isdigit():
|
||||
value = int(value)
|
||||
elif value.replace(".", "", 1).isdigit():
|
||||
value = float(value)
|
||||
cppdefines.append((name, value))
|
||||
result['CPPDEFINES'] = cppdefines
|
||||
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
if isdir(p):
|
||||
result[k][i] = realpath(p)
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def ProcessFlags(env, flags): # pylint: disable=too-many-branches
|
||||
if not flags:
|
||||
return
|
||||
if isinstance(flags, list):
|
||||
flags = " ".join(flags)
|
||||
parsed_flags = env.ParseFlags(str(flags))
|
||||
for flag in parsed_flags.pop("CPPDEFINES"):
|
||||
if not Util.is_Sequence(flag):
|
||||
env.Append(CPPDEFINES=flag)
|
||||
continue
|
||||
_key, _value = flag[:2]
|
||||
if '\"' in _value:
|
||||
_value = _value.replace('\"', '\\\"')
|
||||
elif _value.isdigit():
|
||||
_value = int(_value)
|
||||
elif _value.replace(".", "", 1).isdigit():
|
||||
_value = float(_value)
|
||||
env.Append(CPPDEFINES=(_key, _value))
|
||||
env.Append(**parsed_flags)
|
||||
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(env.get(k, [])):
|
||||
if isdir(p):
|
||||
env[k][i] = realpath(p)
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(env.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
env['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
|
||||
env.Append(**env.ParseFlagsExtended(flags))
|
||||
|
||||
# Cancel any previous definition of name, either built in or
|
||||
# provided with a -D option // Issue #191
|
||||
# provided with a -U option // Issue #191
|
||||
undefines = [
|
||||
u for u in env.get("CCFLAGS", [])
|
||||
if isinstance(u, basestring) and u.startswith("-U")
|
||||
if isinstance(u, string_types) and u.startswith("-U")
|
||||
]
|
||||
if undefines:
|
||||
for undef in undefines:
|
||||
env['CCFLAGS'].remove(undef)
|
||||
if undef[2:] in env['CPPDEFINES']:
|
||||
env['CPPDEFINES'].remove(undef[2:])
|
||||
env.Append(_CPPDEFFLAGS=" %s" % " ".join(undefines))
|
||||
|
||||
|
||||
def ProcessUnFlags(env, flags):
|
||||
if not flags:
|
||||
return
|
||||
if isinstance(flags, list):
|
||||
flags = " ".join(flags)
|
||||
parsed_flags = env.ParseFlags(str(flags))
|
||||
parsed = env.ParseFlagsExtended(flags)
|
||||
|
||||
# get all flags and copy them to each "*FLAGS" variable
|
||||
all_flags = []
|
||||
for items in parsed_flags.values():
|
||||
all_flags.extend(items)
|
||||
all_flags = set(all_flags)
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
all_flags.extend(unflags)
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
parsed[key].extend(all_flags)
|
||||
|
||||
for key in parsed_flags:
|
||||
cur_flags = set(env.Flatten(env.get(key, [])))
|
||||
for item in cur_flags & all_flags:
|
||||
while item in env[key]:
|
||||
env[key].remove(item)
|
||||
|
||||
|
||||
def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
|
||||
if basename(file_).startswith("."):
|
||||
return False
|
||||
for e in ext:
|
||||
if file_.endswith(".%s" % e):
|
||||
return True
|
||||
return False
|
||||
for key, unflags in parsed.items():
|
||||
for unflag in unflags:
|
||||
for current in env.get(key, []):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
isinstance(current, (tuple, list))
|
||||
and unflag[0] == current[0]
|
||||
]
|
||||
if any(conditions):
|
||||
env[key].remove(current)
|
||||
|
||||
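The new ProcessUnFlags body replaces blanket set subtraction with per-flag matching: a flag is dropped when it equals the unflag exactly, or when both are tuple-style flags sharing the same first element. A condensed restatement of that rule:

```python
def remove_unflags(current_flags, unflags):
    kept = []
    for current in current_flags:
        drop = any(
            unflag == current
            or (isinstance(current, (tuple, list)) and unflag[0] == current[0])
            for unflag in unflags)
        if not drop:
            kept.append(current)
    return kept

flags = ["-Os", "-Wall", ("-include", "config.h")]
print(remove_unflags(flags, ["-Os", ("-include", "other.h")]))
# ['-Wall']
```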
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None):
|
||||
|
||||
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
|
||||
|
||||
def _append_build_item(items, item, src_dir):
|
||||
if env.IsFileWithExt(item, SRC_BUILD_EXT + SRC_HEADER_EXT):
|
||||
items.add(item.replace(src_dir + sep, ""))
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
src_filter = env.subst(src_filter) if src_filter else None
|
||||
src_filter = src_filter or SRC_FILTER_DEFAULT
|
||||
if isinstance(src_filter, (list, tuple)):
|
||||
src_filter = " ".join(src_filter)
|
||||
|
||||
matches = set()
|
||||
# correct fs directory separator
|
||||
src_filter = src_filter.replace("/", sep).replace("\\", sep)
|
||||
for (action, pattern) in SRC_FILTER_PATTERNS_RE.findall(src_filter):
|
||||
items = set()
|
||||
for item in glob(join(glob_escape(src_dir), pattern)):
|
||||
if isdir(item):
|
||||
for root, _, files in walk(item, followlinks=True):
|
||||
for f in files:
|
||||
_append_build_item(items, join(root, f), src_dir)
|
||||
else:
|
||||
_append_build_item(items, item, src_dir)
|
||||
if action == "+":
|
||||
matches |= items
|
||||
else:
|
||||
matches -= items
|
||||
return sorted(list(matches))
|
||||
return fs.match_src_files(env.subst(src_dir), src_filter,
|
||||
SRC_BUILD_EXT + SRC_HEADER_EXT)
|
||||
|
||||
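The src_filter grammar handled by the removed block (and now by fs.match_src_files) is a sequence of `+<pattern>` and `-<pattern>` tokens: `+` adds matching paths and `-` subtracts them. A minimal parse of that grammar using the same pattern regex:

```python
import re

SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")

src_filter = "+<*> -<.git/> -<tests/>"
for action, pattern in SRC_FILTER_PATTERNS_RE.findall(src_filter):
    verb = "include" if action == "+" else "exclude"
    print("%s files matching %r" % (verb, pattern))
```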
|
||||
def CollectBuildFiles(env,
|
||||
@@ -230,7 +244,7 @@ def CollectBuildFiles(env,
|
||||
variants = []
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
if src_dir.endswith(sep):
|
||||
if src_dir.endswith(os.sep):
|
||||
src_dir = src_dir[:-1]
|
||||
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter):
|
||||
@@ -242,7 +256,7 @@ def CollectBuildFiles(env,
|
||||
variants.append(_var_dir)
|
||||
env.VariantDir(_var_dir, _src_dir, duplicate)
|
||||
|
||||
if env.IsFileWithExt(item, SRC_BUILD_EXT):
|
||||
if fs.path_endswith_ext(item, SRC_BUILD_EXT):
|
||||
sources.append(env.File(join(_var_dir, basename(item))))
|
||||
|
||||
return sources
|
||||
@@ -267,13 +281,14 @@ def BuildFrameworks(env, frameworks):
|
||||
env.Exit(1)
|
||||
|
||||
for f in frameworks:
|
||||
if f in ("arduino", "energia"):
|
||||
if f == "arduino":
|
||||
# Arduino IDE appends .o to the end of filename
|
||||
Builder.match_splitext = scons_patched_match_splitext
|
||||
env.ConvertInoToCpp()
|
||||
if "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.ConvertInoToCpp()
|
||||
|
||||
if f in board_frameworks:
|
||||
SConscript(env.GetFrameworkScript(f))
|
||||
SConscript(env.GetFrameworkScript(f), exports="env")
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: This board doesn't support %s framework!\n" % f)
|
||||
@@ -281,15 +296,16 @@ def BuildFrameworks(env, frameworks):
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
|
||||
lib = env.Clone()
|
||||
return lib.StaticLibrary(
|
||||
lib.subst(variant_dir),
|
||||
lib.CollectBuildFiles(variant_dir, src_dir, src_filter))
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
return env.StaticLibrary(
|
||||
env.subst(variant_dir),
|
||||
env.CollectBuildFiles(variant_dir, src_dir, src_filter))
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
DefaultEnvironment().Append(PIOBUILDFILES=env.Clone().CollectBuildFiles(
|
||||
variant_dir, src_dir, src_filter))
|
||||
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
DefaultEnvironment().Append(
|
||||
PIOBUILDFILES=[env.Object(node) for node in nodes])
|
||||
|
||||
|
||||
def exists(_):
|
||||
@@ -298,9 +314,9 @@ def exists(_):
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
env.AddMethod(ProcessUnFlags)
|
||||
env.AddMethod(IsFileWithExt)
|
||||
env.AddMethod(MatchSourceFiles)
|
||||
env.AddMethod(CollectBuildFiles)
|
||||
env.AddMethod(BuildFrameworks)
|
||||
|
||||
@@ -11,3 +11,62 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from os.path import dirname, isfile, join
|
||||
|
||||
import click
|
||||
|
||||
|
||||
class PlatformioCLI(click.MultiCommand):
|
||||
|
||||
leftover_args = []
|
||||
|
||||
@staticmethod
|
||||
def in_silence():
|
||||
args = PlatformioCLI.leftover_args
|
||||
return args and any([
|
||||
args[0] == "debug" and "--interpreter" in " ".join(args),
|
||||
args[0] == "upgrade", "--json-output" in args, "--version" in args
|
||||
])
|
||||
|
||||
def invoke(self, ctx):
|
||||
PlatformioCLI.leftover_args = ctx.args
|
||||
if hasattr(ctx, "protected_args"):
|
||||
PlatformioCLI.leftover_args = ctx.protected_args + ctx.args
|
||||
return super(PlatformioCLI, self).invoke(ctx)
|
||||
|
||||
def list_commands(self, ctx):
|
||||
cmds = []
|
||||
cmds_dir = dirname(__file__)
|
||||
for name in os.listdir(cmds_dir):
|
||||
if name.startswith("__init__"):
|
||||
continue
|
||||
if isfile(join(cmds_dir, name, "command.py")):
|
||||
cmds.append(name)
|
||||
elif name.endswith(".py"):
|
||||
cmds.append(name[:-3])
|
||||
cmds.sort()
|
||||
return cmds
|
||||
|
||||
def get_command(self, ctx, cmd_name):
|
||||
mod = None
|
||||
try:
|
||||
mod = __import__("platformio.commands." + cmd_name, None, None,
|
||||
["cli"])
|
||||
except ImportError:
|
||||
try:
|
||||
return self._handle_obsolate_command(cmd_name)
|
||||
except AttributeError:
|
||||
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
|
||||
return mod.cli
|
||||
|
||||
@staticmethod
|
||||
def _handle_obsolate_command(name):
|
||||
if name == "platforms":
|
||||
from platformio.commands import platform
|
||||
return platform.cli
|
||||
if name == "serialports":
|
||||
from platformio.commands import device
|
||||
return device.cli
|
||||
raise AttributeError()
|
||||
|
||||
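PlatformioCLI resolves subcommands lazily: list_commands scans the commands directory, and get_command imports the matching module only when it is invoked. Below is a generic sketch of that click pattern with placeholder package and directory names, not PlatformIO's actual layout.

```python
import os
import click

class LazyCLI(click.MultiCommand):
    commands_pkg = "myapp.commands"                             # hypothetical
    commands_dir = os.path.join(os.path.dirname(__file__), "commands")

    def list_commands(self, ctx):
        cmds = []
        for name in os.listdir(self.commands_dir):
            if name.endswith(".py") and not name.startswith("__init__"):
                cmds.append(name[:-3])
        return sorted(cmds)

    def get_command(self, ctx, cmd_name):
        try:
            mod = __import__("%s.%s" % (self.commands_pkg, cmd_name),
                             None, None, ["cli"])
        except ImportError:
            raise click.UsageError('No such command "%s"' % cmd_name, ctx)
        return mod.cli
```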
@@ -15,8 +15,10 @@
|
||||
import json
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import util
|
||||
from platformio import fs
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.managers.platform import PlatformManager
|
||||
|
||||
|
||||
@@ -41,34 +43,18 @@ def cli(query, installed, json_output): # pylint: disable=R0912
|
||||
click.echo("")
|
||||
click.echo("Platform: ", nl=False)
|
||||
click.secho(platform, bold=True)
|
||||
click.echo("-" * terminal_width)
|
||||
click.echo("=" * terminal_width)
|
||||
print_boards(boards)
|
||||
return True
|
||||
|
||||
|
||||
def print_boards(boards):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
BOARDLIST_TPL = ("{type:<30} {mcu:<14} {frequency:<8} "
|
||||
" {flash:<7} {ram:<6} {name}")
|
||||
click.echo(
|
||||
BOARDLIST_TPL.format(
|
||||
type=click.style("ID", fg="cyan"),
|
||||
mcu="MCU",
|
||||
frequency="Frequency",
|
||||
flash="Flash",
|
||||
ram="RAM",
|
||||
name="Name"))
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
for board in boards:
|
||||
click.echo(
|
||||
BOARDLIST_TPL.format(
|
||||
type=click.style(board['id'], fg="cyan"),
|
||||
mcu=board['mcu'],
|
||||
frequency="%dMHz" % (board['fcpu'] / 1000000),
|
||||
flash=util.format_filesize(board['rom']),
|
||||
ram=util.format_filesize(board['ram']),
|
||||
name=board['name']))
|
||||
tabulate([(click.style(b['id'], fg="cyan"), b['mcu'], "%dMHz" %
|
||||
(b['fcpu'] / 1000000), fs.format_filesize(
|
||||
b['rom']), fs.format_filesize(b['ram']), b['name'])
|
||||
for b in boards],
|
||||
headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"]))
|
||||
|
||||
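print_boards now delegates column layout to tabulate instead of a hand-rolled template. A minimal usage example with a single made-up board entry:

```python
from tabulate import tabulate

boards = [{"id": "uno", "mcu": "ATMEGA328P", "fcpu": 16000000,
           "rom": 32256, "ram": 2048, "name": "Arduino Uno"}]

print(tabulate(
    [(b["id"], b["mcu"], "%dMHz" % (b["fcpu"] // 1000000),
      b["rom"], b["ram"], b["name"]) for b in boards],
    headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"]))
```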
|
||||
def _get_boards(installed=False):
|
||||
@@ -84,4 +70,4 @@ def _print_boards_json(query, installed=False):
|
||||
if query.lower() not in search_data.lower():
|
||||
continue
|
||||
result.append(board)
|
||||
click.echo(json.dumps(result))
|
||||
click.echo(dump_json_to_unicode(result))
|
||||
|
||||
@@ -20,11 +20,13 @@ from tempfile import mkdtemp
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, util
|
||||
from platformio import app, fs
|
||||
from platformio.commands.init import cli as cmd_init
|
||||
from platformio.commands.init import validate_boards
|
||||
from platformio.commands.run import cli as cmd_run
|
||||
from platformio.compat import glob_escape
|
||||
from platformio.exception import CIBuildEnvsEmpty
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@@ -46,34 +48,35 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
|
||||
@click.command("ci", short_help="Continuous Integration")
|
||||
@click.argument("src", nargs=-1, callback=validate_path)
|
||||
@click.option(
|
||||
"-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
|
||||
@click.option("-l",
|
||||
"--lib",
|
||||
multiple=True,
|
||||
callback=validate_path,
|
||||
metavar="DIRECTORY")
|
||||
@click.option("--exclude", multiple=True)
|
||||
@click.option(
|
||||
"-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
|
||||
@click.option(
|
||||
"--build-dir",
|
||||
default=mkdtemp,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-b",
|
||||
"--board",
|
||||
multiple=True,
|
||||
metavar="ID",
|
||||
callback=validate_boards)
|
||||
@click.option("--build-dir",
|
||||
default=mkdtemp,
|
||||
type=click.Path(file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--keep-build-dir", is_flag=True)
|
||||
@click.option(
|
||||
"-C",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=False,
|
||||
readable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-c",
|
||||
"--project-conf",
|
||||
type=click.Path(exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=False,
|
||||
readable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments
|
||||
def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
ctx, src, lib, exclude, board, build_dir, keep_build_dir, project_conf,
|
||||
project_option, verbose):
|
||||
|
||||
@@ -84,9 +87,13 @@ def cli( # pylint: disable=too-many-arguments
|
||||
|
||||
try:
|
||||
app.set_session_var("force_option", True)
|
||||
_clean_dir(build_dir)
|
||||
|
||||
for dir_name, patterns in dict(lib=lib, src=src).iteritems():
|
||||
if not keep_build_dir and isdir(build_dir):
|
||||
fs.rmtree(build_dir)
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
|
||||
for dir_name, patterns in dict(lib=lib, src=src).items():
|
||||
if not patterns:
|
||||
continue
|
||||
contents = []
|
||||
@@ -103,22 +110,16 @@ def cli( # pylint: disable=too-many-arguments
|
||||
_exclude_contents(build_dir, exclude)
|
||||
|
||||
# initialise project
|
||||
ctx.invoke(
|
||||
cmd_init,
|
||||
project_dir=build_dir,
|
||||
board=board,
|
||||
project_option=project_option)
|
||||
ctx.invoke(cmd_init,
|
||||
project_dir=build_dir,
|
||||
board=board,
|
||||
project_option=project_option)
|
||||
|
||||
# process project
|
||||
ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
|
||||
finally:
|
||||
if not keep_build_dir:
|
||||
util.rmtree_(build_dir)
|
||||
|
||||
|
||||
def _clean_dir(dirpath):
|
||||
util.rmtree_(dirpath)
|
||||
makedirs(dirpath)
|
||||
fs.rmtree(build_dir)
|
||||
|
||||
|
||||
def _copy_contents(dst_dir, contents):
|
||||
@@ -135,7 +136,8 @@ def _copy_contents(dst_dir, contents):
|
||||
if dst_dir_name == "src" and len(items['dirs']) == 1:
|
||||
copytree(list(items['dirs']).pop(), dst_dir, symlinks=True)
|
||||
else:
|
||||
makedirs(dst_dir)
|
||||
if not isdir(dst_dir):
|
||||
makedirs(dst_dir)
|
||||
for d in items['dirs']:
|
||||
copytree(d, join(dst_dir, basename(d)), symlinks=True)
|
||||
|
||||
@@ -146,24 +148,26 @@ def _copy_contents(dst_dir, contents):
|
||||
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
|
||||
|
||||
for f in items['files']:
|
||||
copyfile(f, join(dst_dir, basename(f)))
|
||||
dst_file = join(dst_dir, basename(f))
|
||||
if f == dst_file:
|
||||
continue
|
||||
copyfile(f, dst_file)
|
||||
|
||||
|
||||
def _exclude_contents(dst_dir, patterns):
|
||||
contents = []
|
||||
for p in patterns:
|
||||
contents += glob(join(util.glob_escape(dst_dir), p))
|
||||
contents += glob(join(glob_escape(dst_dir), p))
|
||||
for path in contents:
|
||||
path = abspath(path)
|
||||
if isdir(path):
|
||||
util.rmtree_(path)
|
||||
fs.rmtree(path)
|
||||
elif isfile(path):
|
||||
remove(path)
|
||||
|
||||
|
||||
def _copy_project_conf(build_dir, project_conf):
|
||||
config = util.load_project_config(project_conf)
|
||||
config = ProjectConfig(project_conf, parse_extra=False)
|
||||
if config.has_section("platformio"):
|
||||
config.remove_section("platformio")
|
||||
with open(join(build_dir, "platformio.ini"), "w") as fp:
|
||||
config.write(fp)
|
||||
config.save(join(build_dir, "platformio.ini"))
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
from os import getcwd
|
||||
|
||||
import click
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.command(
|
||||
"debug",
|
||||
context_settings=dict(ignore_unknown_options=True),
|
||||
short_help="PIO Unified Debugger")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
|
||||
15
platformio/commands/debug/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.commands.debug.command import cli
|
||||
292
platformio/commands/debug/client.py
Normal file
@@ -0,0 +1,292 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import signal
|
||||
import time
|
||||
from hashlib import sha1
|
||||
from os.path import abspath, basename, dirname, isdir, join, splitext
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from twisted.internet import protocol # pylint: disable=import-error
|
||||
from twisted.internet import reactor # pylint: disable=import-error
|
||||
from twisted.internet import stdio # pylint: disable=import-error
|
||||
from twisted.internet import task # pylint: disable=import-error
|
||||
|
||||
from platformio import app, exception, fs, proc, util
|
||||
from platformio.commands.debug import helpers, initcfgs
|
||||
from platformio.commands.debug.process import BaseProcess
|
||||
from platformio.commands.debug.server import DebugServer
|
||||
from platformio.compat import hashlib_encode_data
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
from platformio.telemetry import MeasurementProtocol
|
||||
|
||||
LOG_FILE = None
|
||||
|
||||
|
||||
class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
|
||||
|
||||
PIO_SRC_NAME = ".pioinit"
|
||||
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
|
||||
|
||||
def __init__(self, project_dir, args, debug_options, env_options):
|
||||
self.project_dir = project_dir
|
||||
self.args = list(args)
|
||||
self.debug_options = debug_options
|
||||
self.env_options = env_options
|
||||
|
||||
self._debug_server = DebugServer(debug_options, env_options)
|
||||
self._session_id = None
|
||||
|
||||
if not isdir(get_project_cache_dir()):
|
||||
os.makedirs(get_project_cache_dir())
|
||||
self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(),
|
||||
prefix=".piodebug-")
|
||||
|
||||
self._target_is_run = False
|
||||
self._last_server_activity = 0
|
||||
self._auto_continue_timer = None
|
||||
|
||||
def spawn(self, gdb_path, prog_path):
|
||||
session_hash = gdb_path + prog_path
|
||||
self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
|
||||
self._kill_previous_session()
|
||||
|
||||
patterns = {
|
||||
"PROJECT_DIR": self.project_dir,
|
||||
"PROG_PATH": prog_path,
|
||||
"PROG_DIR": dirname(prog_path),
|
||||
"PROG_NAME": basename(splitext(prog_path)[0]),
|
||||
"DEBUG_PORT": self.debug_options['port'],
|
||||
"UPLOAD_PROTOCOL": self.debug_options['upload_protocol'],
|
||||
"INIT_BREAK": self.debug_options['init_break'] or "",
|
||||
"LOAD_CMDS": "\n".join(self.debug_options['load_cmds'] or []),
|
||||
}
|
||||
|
||||
self._debug_server.spawn(patterns)
|
||||
|
||||
if not patterns['DEBUG_PORT']:
|
||||
patterns['DEBUG_PORT'] = self._debug_server.get_debug_port()
|
||||
self.generate_pioinit(self._gdbsrc_dir, patterns)
|
||||
|
||||
# start GDB client
|
||||
args = [
|
||||
"piogdb",
|
||||
"-q",
|
||||
"--directory", self._gdbsrc_dir,
|
||||
"--directory", self.project_dir,
|
||||
"-l", "10"
|
||||
] # yapf: disable
|
||||
args.extend(self.args)
|
||||
if not gdb_path:
|
||||
raise exception.DebugInvalidOptions("GDB client is not configured")
|
||||
gdb_data_dir = self._get_data_dir(gdb_path)
|
||||
if gdb_data_dir:
|
||||
args.extend(["--data-directory", gdb_data_dir])
|
||||
args.append(patterns['PROG_PATH'])
|
||||
|
||||
return reactor.spawnProcess(self,
|
||||
gdb_path,
|
||||
args,
|
||||
path=self.project_dir,
|
||||
env=os.environ)
|
||||
|
||||
@staticmethod
|
||||
def _get_data_dir(gdb_path):
|
||||
if "msp430" in gdb_path:
|
||||
return None
|
||||
gdb_data_dir = abspath(join(dirname(gdb_path), "..", "share", "gdb"))
|
||||
return gdb_data_dir if isdir(gdb_data_dir) else None
|
||||
|
||||
def generate_pioinit(self, dst_dir, patterns):
|
||||
server_exe = (self.debug_options.get("server")
|
||||
or {}).get("executable", "").lower()
|
||||
if "jlink" in server_exe:
|
||||
cfg = initcfgs.GDB_JLINK_INIT_CONFIG
|
||||
elif "st-util" in server_exe:
|
||||
cfg = initcfgs.GDB_STUTIL_INIT_CONFIG
|
||||
elif "mspdebug" in server_exe:
|
||||
cfg = initcfgs.GDB_MSPDEBUG_INIT_CONFIG
|
||||
elif "qemu" in server_exe:
|
||||
cfg = initcfgs.GDB_QEMU_INIT_CONFIG
|
||||
elif self.debug_options['require_debug_port']:
|
||||
cfg = initcfgs.GDB_BLACKMAGIC_INIT_CONFIG
|
||||
else:
|
||||
cfg = initcfgs.GDB_DEFAULT_INIT_CONFIG
|
||||
commands = cfg.split("\n")
|
||||
|
||||
if self.debug_options['init_cmds']:
|
||||
commands = self.debug_options['init_cmds']
|
||||
commands.extend(self.debug_options['extra_cmds'])
|
||||
|
||||
if not any("define pio_reset_target" in cmd for cmd in commands):
|
||||
commands = [
|
||||
"define pio_reset_target",
|
||||
" echo Warning! Undefined pio_reset_target command\\n",
|
||||
" mon reset",
|
||||
"end"
|
||||
] + commands # yapf: disable
|
||||
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
|
||||
commands = [
|
||||
"define pio_reset_halt_target",
|
||||
" echo Warning! Undefined pio_reset_halt_target command\\n",
|
||||
" mon reset halt",
|
||||
"end"
|
||||
] + commands # yapf: disable
|
||||
if not any("define pio_restart_target" in cmd for cmd in commands):
|
||||
commands += [
|
||||
"define pio_restart_target",
|
||||
" pio_reset_halt_target",
|
||||
" $INIT_BREAK",
|
||||
" %s" % ("continue" if patterns['INIT_BREAK'] else "next"),
|
||||
"end"
|
||||
] # yapf: disable
|
||||
|
||||
banner = [
|
||||
"echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
|
||||
"echo PlatformIO: debug_tool = %s\\n" % self.debug_options['tool'],
|
||||
"echo PlatformIO: Initializing remote target...\\n"
|
||||
]
|
||||
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
|
||||
commands = banner + commands + footer
|
||||
|
||||
with open(join(dst_dir, self.PIO_SRC_NAME), "w") as fp:
|
||||
fp.write("\n".join(self.apply_patterns(commands, patterns)))
|
||||
|
||||
def connectionMade(self):
|
||||
self._lock_session(self.transport.pid)
|
||||
|
||||
p = protocol.Protocol()
|
||||
p.dataReceived = self.onStdInData
|
||||
stdio.StandardIO(p)
|
||||
|
||||
def onStdInData(self, data):
|
||||
if LOG_FILE:
|
||||
with open(LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
|
||||
self._last_server_activity = time.time()
|
||||
|
||||
if b"-exec-run" in data:
|
||||
if self._target_is_run:
|
||||
token, _ = data.split(b"-", 1)
|
||||
self.outReceived(token + b"^running\n")
|
||||
return
|
||||
data = data.replace(b"-exec-run", b"-exec-continue")
|
||||
|
||||
if b"-exec-continue" in data:
|
||||
self._target_is_run = True
|
||||
if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
|
||||
# Allow terminating via SIGINT/CTRL+C
|
||||
signal.signal(signal.SIGINT, signal.default_int_handler)
|
||||
self.transport.write(b"pio_reset_target\n")
|
||||
self.transport.write(data)
|
||||
|
||||
def processEnded(self, reason): # pylint: disable=unused-argument
|
||||
self._unlock_session()
|
||||
if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
|
||||
fs.rmtree(self._gdbsrc_dir)
|
||||
if self._debug_server:
|
||||
self._debug_server.terminate()
|
||||
|
||||
reactor.stop()
|
||||
|
||||
def outReceived(self, data):
|
||||
if LOG_FILE:
|
||||
with open(LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
|
||||
self._last_server_activity = time.time()
|
||||
super(GDBClient, self).outReceived(data)
|
||||
self._handle_error(data)
|
||||
# go to init break automatically
|
||||
if self.INIT_COMPLETED_BANNER.encode() in data:
|
||||
self._auto_continue_timer = task.LoopingCall(
|
||||
self._auto_exec_continue)
|
||||
self._auto_continue_timer.start(0.1)
|
||||
|
||||
def errReceived(self, data):
|
||||
super(GDBClient, self).errReceived(data)
|
||||
self._handle_error(data)
|
||||
|
||||
def console_log(self, msg):
|
||||
if helpers.is_mi_mode(self.args):
|
||||
self.outReceived(('~"%s\\n"\n' % msg).encode())
|
||||
else:
|
||||
self.outReceived(("%s\n" % msg).encode())
|
||||
|
||||
def _auto_exec_continue(self):
|
||||
auto_exec_delay = 0.5 # in seconds
|
||||
if self._last_server_activity > (time.time() - auto_exec_delay):
|
||||
return
|
||||
if self._auto_continue_timer:
|
||||
self._auto_continue_timer.stop()
|
||||
self._auto_continue_timer = None
|
||||
|
||||
if not self.debug_options['init_break'] or self._target_is_run:
|
||||
return
|
||||
self.console_log(
|
||||
"PlatformIO: Resume the execution to `debug_init_break = %s`" %
|
||||
self.debug_options['init_break'])
|
||||
self.console_log("PlatformIO: More configuration options -> "
|
||||
"http://bit.ly/pio-debug")
|
||||
self.transport.write(b"0-exec-continue\n" if helpers.
|
||||
is_mi_mode(self.args) else b"continue\n")
|
||||
self._target_is_run = True
|
||||
|
||||
def _handle_error(self, data):
|
||||
if (self.PIO_SRC_NAME.encode() not in data
|
||||
or b"Error in sourced" not in data):
|
||||
return
|
||||
configuration = {"debug": self.debug_options, "env": self.env_options}
|
||||
exd = re.sub(r'\\(?!")', "/", json.dumps(configuration))
|
||||
exd = re.sub(r'"(?:[a-z]\:)?((/[^"/]+)+)"',
|
||||
lambda m: '"%s"' % join(*m.group(1).split("/")[-2:]), exd,
|
||||
re.I | re.M)
|
||||
mp = MeasurementProtocol()
|
||||
mp['exd'] = "DebugGDBPioInitError: %s" % exd
|
||||
mp['exf'] = 1
|
||||
mp.send("exception")
|
||||
self.transport.loseConnection()
|
||||
|
||||
def _kill_previous_session(self):
|
||||
assert self._session_id
|
||||
pid = None
|
||||
with app.ContentCache() as cc:
|
||||
pid = cc.get(self._session_id)
|
||||
cc.delete(self._session_id)
|
||||
if not pid:
|
||||
return
|
||||
if "windows" in util.get_systype():
|
||||
kill = ["Taskkill", "/PID", pid, "/F"]
|
||||
else:
|
||||
kill = ["kill", pid]
|
||||
try:
|
||||
proc.exec_command(kill)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
def _lock_session(self, pid):
|
||||
if not self._session_id:
|
||||
return
|
||||
with app.ContentCache() as cc:
|
||||
cc.set(self._session_id, str(pid), "1h")
|
||||
|
||||
def _unlock_session(self):
|
||||
if not self._session_id:
|
||||
return
|
||||
with app.ContentCache() as cc:
|
||||
cc.delete(self._session_id)
|
||||
151
platformio/commands/debug/command.py
Normal file
@@ -0,0 +1,151 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-statements
|
||||
# pylint: disable=too-many-locals, too-many-branches
|
||||
|
||||
import os
|
||||
import signal
|
||||
from os.path import isfile, join
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, fs, proc, util
|
||||
from platformio.commands.debug import helpers
|
||||
from platformio.managers.core import inject_contrib_pysite
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import (is_platformio_project,
|
||||
load_project_ide_data)
|
||||
|
||||
|
||||
@click.command("debug",
|
||||
context_settings=dict(ignore_unknown_options=True),
|
||||
short_help="PIO Unified Debugger")
|
||||
@click.option("-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-c",
|
||||
"--project-conf",
|
||||
type=click.Path(exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=False,
|
||||
readable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.pass_context
|
||||
def cli(ctx, project_dir, project_conf, environment, verbose, interface,
|
||||
__unprocessed):
|
||||
# use env variables from Eclipse or CLion
|
||||
for sysenv in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
|
||||
if is_platformio_project(project_dir):
|
||||
break
|
||||
if os.getenv(sysenv):
|
||||
project_dir = os.getenv(sysenv)
|
||||
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig.get_instance(
|
||||
project_conf or join(project_dir, "platformio.ini"))
|
||||
config.validate(envs=[environment] if environment else None)
|
||||
|
||||
env_name = environment or helpers.get_default_debug_env(config)
|
||||
env_options = config.items(env=env_name, as_dict=True)
|
||||
if not set(env_options.keys()) >= set(["platform", "board"]):
|
||||
raise exception.ProjectEnvsNotAvailable()
|
||||
debug_options = helpers.validate_debug_options(ctx, env_options)
|
||||
assert debug_options
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(ctx, project_dir, env_name, False,
|
||||
verbose)
|
||||
|
||||
configuration = load_project_ide_data(project_dir, env_name)
|
||||
if not configuration:
|
||||
raise exception.DebugInvalidOptions(
|
||||
"Could not load debug configuration")
|
||||
|
||||
if "--version" in __unprocessed:
|
||||
result = proc.exec_command([configuration['gdb_path'], "--version"])
|
||||
if result['returncode'] == 0:
|
||||
return click.echo(result['out'])
|
||||
raise exception.PlatformioException("\n".join(
|
||||
[result['out'], result['err']]))
|
||||
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
for line in str(e).split("\n") + [""]:
|
||||
click.echo(
|
||||
('~"%s\\n"' if helpers.is_mi_mode(__unprocessed) else "%s") %
|
||||
line)
|
||||
|
||||
debug_options['load_cmds'] = helpers.configure_esp32_load_cmds(
|
||||
debug_options, configuration)
|
||||
|
||||
rebuild_prog = False
|
||||
preload = debug_options['load_cmds'] == ["preload"]
|
||||
load_mode = debug_options['load_mode']
|
||||
if load_mode == "always":
|
||||
rebuild_prog = (
|
||||
preload
|
||||
or not helpers.has_debug_symbols(configuration['prog_path']))
|
||||
elif load_mode == "modified":
|
||||
rebuild_prog = (
|
||||
helpers.is_prog_obsolete(configuration['prog_path'])
|
||||
or not helpers.has_debug_symbols(configuration['prog_path']))
|
||||
else:
|
||||
rebuild_prog = not isfile(configuration['prog_path'])
|
||||
|
||||
if preload or (not rebuild_prog and load_mode != "always"):
|
||||
# don't load firmware through debug server
|
||||
debug_options['load_cmds'] = []
|
||||
|
||||
if rebuild_prog:
|
||||
if helpers.is_mi_mode(__unprocessed):
|
||||
click.echo('~"Preparing firmware for debugging...\\n"')
|
||||
output = helpers.GDBBytesIO()
|
||||
with util.capture_std_streams(output):
|
||||
helpers.predebug_project(ctx, project_dir, env_name, preload,
|
||||
verbose)
|
||||
output.close()
|
||||
else:
|
||||
click.echo("Preparing firmware for debugging...")
|
||||
helpers.predebug_project(ctx, project_dir, env_name, preload,
|
||||
verbose)
|
||||
|
||||
# save SHA sum of newly created prog
|
||||
if load_mode == "modified":
|
||||
helpers.is_prog_obsolete(configuration['prog_path'])
|
||||
|
||||
if not isfile(configuration['prog_path']):
|
||||
raise exception.DebugInvalidOptions("Program/firmware is missed")
|
||||
|
||||
# run debugging client
|
||||
inject_contrib_pysite()
|
||||
from platformio.commands.debug.client import GDBClient, reactor
|
||||
|
||||
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
|
||||
client.spawn(configuration['gdb_path'], configuration['prog_path'])
|
||||
|
||||
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
|
||||
reactor.run()
|
||||
|
||||
return True
|
||||
265
platformio/commands/debug/helpers.py
Normal file
@@ -0,0 +1,265 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
import time
|
||||
from fnmatch import fnmatch
|
||||
from hashlib import sha1
|
||||
from io import BytesIO
|
||||
from os.path import isfile
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.commands.platform import \
|
||||
platform_install as cmd_platform_install
|
||||
from platformio.commands.run import cli as cmd_run
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
class GDBBytesIO(BytesIO): # pylint: disable=too-few-public-methods
|
||||
|
||||
STDOUT = sys.stdout
|
||||
|
||||
def write(self, text):
|
||||
if "\n" in text:
|
||||
for line in text.strip().split("\n"):
|
||||
self.STDOUT.write('~"%s\\n"\n' % line)
|
||||
else:
|
||||
self.STDOUT.write('~"%s"' % text)
|
||||
self.STDOUT.flush()
|
||||
|
||||
|
||||
def is_mi_mode(args):
|
||||
return "--interpreter" in " ".join(args)
|
||||
|
||||
|
||||
def get_default_debug_env(config):
|
||||
default_envs = config.default_envs()
|
||||
all_envs = config.envs()
|
||||
for env in default_envs:
|
||||
if config.get("env:" + env, "build_type") == "debug":
|
||||
return env
|
||||
for env in all_envs:
|
||||
if config.get("env:" + env, "build_type") == "debug":
|
||||
return env
|
||||
return default_envs[0] if default_envs else all_envs[0]
|
||||
|
||||
|
||||
def predebug_project(ctx, project_dir, env_name, preload, verbose):
|
||||
ctx.invoke(cmd_run,
|
||||
project_dir=project_dir,
|
||||
environment=[env_name],
|
||||
target=["debug"] + (["upload"] if preload else []),
|
||||
verbose=verbose)
|
||||
if preload:
|
||||
time.sleep(5)
|
||||
|
||||
|
||||
def validate_debug_options(cmd_ctx, env_options):
|
||||
|
||||
def _cleanup_cmds(items):
|
||||
items = ProjectConfig.parse_multi_values(items)
|
||||
return [
|
||||
"$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items
|
||||
]
|
||||
|
||||
try:
|
||||
platform = PlatformFactory.newPlatform(env_options['platform'])
|
||||
except exception.UnknownPlatform:
|
||||
cmd_ctx.invoke(cmd_platform_install,
|
||||
platforms=[env_options['platform']],
|
||||
skip_default_package=True)
|
||||
platform = PlatformFactory.newPlatform(env_options['platform'])
|
||||
|
||||
board_config = platform.board_config(env_options['board'])
|
||||
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
|
||||
tool_settings = board_config.get("debug", {}).get("tools",
|
||||
{}).get(tool_name, {})
|
||||
server_options = None
|
||||
|
||||
# specific server per a system
|
||||
if isinstance(tool_settings.get("server", {}), list):
|
||||
for item in tool_settings['server'][:]:
|
||||
tool_settings['server'] = item
|
||||
if util.get_systype() in item.get("system", []):
|
||||
break
|
||||
|
||||
# user overwrites debug server
|
||||
if env_options.get("debug_server"):
|
||||
server_options = {
|
||||
"cwd": None,
|
||||
"executable": None,
|
||||
"arguments": env_options.get("debug_server")
|
||||
}
|
||||
server_options['executable'] = server_options['arguments'][0]
|
||||
server_options['arguments'] = server_options['arguments'][1:]
|
||||
elif "server" in tool_settings:
|
||||
server_package = tool_settings['server'].get("package")
|
||||
server_package_dir = platform.get_package_dir(
|
||||
server_package) if server_package else None
|
||||
if server_package and not server_package_dir:
|
||||
platform.install_packages(with_packages=[server_package],
|
||||
skip_default_package=True,
|
||||
silent=True)
|
||||
server_package_dir = platform.get_package_dir(server_package)
|
||||
server_options = dict(
|
||||
cwd=server_package_dir if server_package else None,
|
||||
executable=tool_settings['server'].get("executable"),
|
||||
arguments=[
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir else a
|
||||
for a in tool_settings['server'].get("arguments", [])
|
||||
])
|
||||
|
||||
extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
|
||||
extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
|
||||
result = dict(
|
||||
tool=tool_name,
|
||||
upload_protocol=env_options.get(
|
||||
"upload_protocol",
|
||||
board_config.get("upload", {}).get("protocol")),
|
||||
load_cmds=_cleanup_cmds(
|
||||
env_options.get(
|
||||
"debug_load_cmds",
|
||||
tool_settings.get("load_cmds",
|
||||
tool_settings.get("load_cmd", "load")))),
|
||||
load_mode=env_options.get("debug_load_mode",
|
||||
tool_settings.get("load_mode", "always")),
|
||||
init_break=env_options.get(
|
||||
"debug_init_break", tool_settings.get("init_break",
|
||||
"tbreak main")),
|
||||
init_cmds=_cleanup_cmds(
|
||||
env_options.get("debug_init_cmds",
|
||||
tool_settings.get("init_cmds"))),
|
||||
extra_cmds=extra_cmds,
|
||||
require_debug_port=tool_settings.get("require_debug_port", False),
|
||||
port=reveal_debug_port(
|
||||
env_options.get("debug_port", tool_settings.get("port")),
|
||||
tool_name, tool_settings),
|
||||
server=server_options)
|
||||
return result
|
||||
|
||||
|
||||
def configure_esp32_load_cmds(debug_options, configuration):
|
||||
ignore_conds = [
|
||||
debug_options['load_cmds'] != ["load"],
|
||||
"xtensa-esp32" not in configuration.get("cc_path", ""),
|
||||
not configuration.get("flash_extra_images"), not all([
|
||||
isfile(item['path'])
|
||||
for item in configuration.get("flash_extra_images")
|
||||
])
|
||||
]
|
||||
if any(ignore_conds):
|
||||
return debug_options['load_cmds']
|
||||
|
||||
mon_cmds = [
|
||||
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
|
||||
path=item['path'], offset=item['offset'])
|
||||
for item in configuration.get("flash_extra_images")
|
||||
]
|
||||
mon_cmds.append('monitor program_esp32 "{%s.bin}" 0x10000 verify' %
|
||||
configuration['prog_path'][:-4])
|
||||
return mon_cmds
|
||||
|
||||
|
||||
def has_debug_symbols(prog_path):
|
||||
if not isfile(prog_path):
|
||||
return False
|
||||
matched = {
|
||||
b".debug_info": False,
|
||||
b".debug_abbrev": False,
|
||||
b" -Og": False,
|
||||
b" -g": False,
|
||||
b"__PLATFORMIO_BUILD_DEBUG__": False
|
||||
}
|
||||
with open(prog_path, "rb") as fp:
|
||||
last_data = b""
|
||||
while True:
|
||||
data = fp.read(1024)
|
||||
if not data:
|
||||
break
|
||||
for pattern, found in matched.items():
|
||||
if found:
|
||||
continue
|
||||
if pattern in last_data + data:
|
||||
matched[pattern] = True
|
||||
last_data = data
|
||||
return all(matched.values())
|
||||
|
||||
|
||||
def is_prog_obsolete(prog_path):
|
||||
prog_hash_path = prog_path + ".sha1"
|
||||
if not isfile(prog_path):
|
||||
return True
|
||||
shasum = sha1()
|
||||
with open(prog_path, "rb") as fp:
|
||||
while True:
|
||||
data = fp.read(1024)
|
||||
if not data:
|
||||
break
|
||||
shasum.update(data)
|
||||
new_digest = shasum.hexdigest()
|
||||
old_digest = None
|
||||
if isfile(prog_hash_path):
|
||||
with open(prog_hash_path, "r") as fp:
|
||||
old_digest = fp.read()
|
||||
if new_digest == old_digest:
|
||||
return False
|
||||
with open(prog_hash_path, "w") as fp:
|
||||
fp.write(new_digest)
|
||||
return True
|
||||
|
||||
|
||||
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
|
||||
|
||||
def _get_pattern():
|
||||
if not env_debug_port:
|
||||
return None
|
||||
if set(["*", "?", "[", "]"]) & set(env_debug_port):
|
||||
return env_debug_port
|
||||
return None
|
||||
|
||||
def _is_match_pattern(port):
|
||||
pattern = _get_pattern()
|
||||
if not pattern:
|
||||
return True
|
||||
return fnmatch(port, pattern)
|
||||
|
||||
def _look_for_serial_port(hwids):
|
||||
for item in util.get_serialports(filter_hwid=True):
|
||||
if not _is_match_pattern(item['port']):
|
||||
continue
|
||||
port = item['port']
|
||||
if tool_name.startswith("blackmagic"):
|
||||
if "windows" in util.get_systype() and \
|
||||
port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item['description']:
|
||||
return port
|
||||
for hwid in hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item['hwid']:
|
||||
return port
|
||||
return None
|
||||
|
||||
if env_debug_port and not _get_pattern():
|
||||
return env_debug_port
|
||||
if not tool_settings.get("require_debug_port"):
|
||||
return None
|
||||
|
||||
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
|
||||
if not debug_port:
|
||||
raise exception.DebugInvalidOptions(
|
||||
"Please specify `debug_port` for environment")
|
||||
return debug_port
|
||||
124
platformio/commands/debug/initcfgs.py
Normal file
@@ -0,0 +1,124 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
GDB_DEFAULT_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset halt
|
||||
end
|
||||
|
||||
define pio_reset_target
|
||||
monitor reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$INIT_BREAK
|
||||
pio_reset_halt_target
|
||||
$LOAD_CMDS
|
||||
monitor init
|
||||
pio_reset_halt_target
|
||||
"""
|
||||
|
||||
GDB_STUTIL_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor halt
|
||||
monitor reset
|
||||
end
|
||||
|
||||
define pio_reset_target
|
||||
monitor reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$INIT_BREAK
|
||||
pio_reset_halt_target
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
"""
|
||||
|
||||
GDB_JLINK_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor halt
|
||||
monitor reset
|
||||
end
|
||||
|
||||
define pio_reset_target
|
||||
monitor reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$INIT_BREAK
|
||||
pio_reset_halt_target
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
"""
|
||||
|
||||
GDB_BLACKMAGIC_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
set language c
|
||||
set *0xE000ED0C = 0x05FA0004
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
while ($busy)
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
end
|
||||
set language auto
|
||||
end
|
||||
|
||||
define pio_reset_target
|
||||
pio_reset_halt_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor swdp_scan
|
||||
attach 1
|
||||
set mem inaccessible-by-default off
|
||||
$INIT_BREAK
|
||||
$LOAD_CMDS
|
||||
|
||||
set language c
|
||||
set *0xE000ED0C = 0x05FA0004
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
while ($busy)
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
end
|
||||
set language auto
|
||||
"""
|
||||
|
||||
GDB_MSPDEBUG_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
|
||||
define pio_reset_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$INIT_BREAK
|
||||
monitor erase
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
"""
|
||||
|
||||
GDB_QEMU_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor system_reset
|
||||
end
|
||||
|
||||
define pio_reset_target
|
||||
pio_reset_halt_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$INIT_BREAK
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
"""
|
||||
79
platformio/commands/debug/process.py
Normal file
@@ -0,0 +1,79 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import signal
|
||||
|
||||
import click
|
||||
from twisted.internet import protocol # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import string_types
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_core_dir
|
||||
|
||||
LOG_FILE = None
|
||||
|
||||
|
||||
class BaseProcess(protocol.ProcessProtocol, object):
|
||||
|
||||
STDOUT_CHUNK_SIZE = 2048
|
||||
|
||||
COMMON_PATTERNS = {
|
||||
"PLATFORMIO_HOME_DIR": get_project_core_dir(),
|
||||
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
|
||||
"PYTHONEXE": get_pythonexe_path()
|
||||
}
|
||||
|
||||
def apply_patterns(self, source, patterns=None):
|
||||
_patterns = self.COMMON_PATTERNS.copy()
|
||||
_patterns.update(patterns or {})
|
||||
|
||||
def _replace(text):
|
||||
for key, value in _patterns.items():
|
||||
pattern = "$%s" % key
|
||||
text = text.replace(pattern, value or "")
|
||||
return text
|
||||
|
||||
if isinstance(source, string_types):
|
||||
source = _replace(source)
|
||||
elif isinstance(source, (list, dict)):
|
||||
items = enumerate(source) if isinstance(source,
|
||||
list) else source.items()
|
||||
for key, value in items:
|
||||
if isinstance(value, string_types):
|
||||
source[key] = _replace(value)
|
||||
elif isinstance(value, (list, dict)):
|
||||
source[key] = self.apply_patterns(value, patterns)
|
||||
|
||||
return source
|
||||
|
||||
def outReceived(self, data):
|
||||
if LOG_FILE:
|
||||
with open(LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
while data:
|
||||
chunk = data[:self.STDOUT_CHUNK_SIZE]
|
||||
click.echo(chunk, nl=False)
|
||||
data = data[self.STDOUT_CHUNK_SIZE:]
|
||||
|
||||
@staticmethod
|
||||
def errReceived(data):
|
||||
if LOG_FILE:
|
||||
with open(LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
click.echo(data, nl=False, err=True)
|
||||
|
||||
@staticmethod
|
||||
def processEnded(_):
|
||||
# Allow terminating via SIGINT/CTRL+C
|
||||
signal.signal(signal.SIGINT, signal.default_int_handler)
|
||||
122
platformio/commands/debug/server.py
Normal file
@@ -0,0 +1,122 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from os.path import isdir, isfile, join
|
||||
|
||||
from twisted.internet import error # pylint: disable=import-error
|
||||
from twisted.internet import reactor # pylint: disable=import-error
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.commands.debug.process import BaseProcess
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class DebugServer(BaseProcess):
|
||||
|
||||
def __init__(self, debug_options, env_options):
|
||||
self.debug_options = debug_options
|
||||
self.env_options = env_options
|
||||
|
||||
self._debug_port = None
|
||||
self._transport = None
|
||||
self._process_ended = False
|
||||
|
||||
def spawn(self, patterns): # pylint: disable=too-many-branches
|
||||
systype = util.get_systype()
|
||||
server = self.debug_options.get("server")
|
||||
if not server:
|
||||
return None
|
||||
server = self.apply_patterns(server, patterns)
|
||||
server_executable = server['executable']
|
||||
if not server_executable:
|
||||
return None
|
||||
if server['cwd']:
|
||||
server_executable = join(server['cwd'], server_executable)
|
||||
if ("windows" in systype and not server_executable.endswith(".exe")
|
||||
and isfile(server_executable + ".exe")):
|
||||
server_executable = server_executable + ".exe"
|
||||
|
||||
if not isfile(server_executable):
|
||||
server_executable = where_is_program(server_executable)
|
||||
if not isfile(server_executable):
|
||||
raise exception.DebugInvalidOptions(
|
||||
"\nCould not launch Debug Server '%s'. Please check that it "
|
||||
"is installed and is included in a system PATH\n\n"
|
||||
"See documentation or contact contact@platformio.org:\n"
|
||||
"http://docs.platformio.org/page/plus/debugging.html\n" %
|
||||
server_executable)
|
||||
|
||||
self._debug_port = ":3333"
|
||||
openocd_pipe_allowed = all([
|
||||
not self.debug_options['port'],
|
||||
"openocd" in server_executable
|
||||
]) # yapf: disable
|
||||
if openocd_pipe_allowed:
|
||||
args = []
|
||||
if server['cwd']:
|
||||
args.extend(["-s", server['cwd']])
|
||||
args.extend([
|
||||
"-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"
|
||||
])
|
||||
args.extend(server['arguments'])
|
||||
str_args = " ".join(
|
||||
[arg if arg.startswith("-") else '"%s"' % arg for arg in args])
|
||||
self._debug_port = '| "%s" %s' % (server_executable, str_args)
|
||||
self._debug_port = self._debug_port.replace("\\", "\\\\")
|
||||
else:
|
||||
env = os.environ.copy()
|
||||
# prepend server "lib" folder to LD path
|
||||
if ("windows" not in systype and server['cwd']
|
||||
and isdir(join(server['cwd'], "lib"))):
|
||||
ld_key = ("DYLD_LIBRARY_PATH"
|
||||
if "darwin" in systype else "LD_LIBRARY_PATH")
|
||||
env[ld_key] = join(server['cwd'], "lib")
|
||||
if os.environ.get(ld_key):
|
||||
env[ld_key] = "%s:%s" % (env[ld_key],
|
||||
os.environ.get(ld_key))
|
||||
# prepend BIN to PATH
|
||||
if server['cwd'] and isdir(join(server['cwd'], "bin")):
|
||||
env['PATH'] = "%s%s%s" % (
|
||||
join(server['cwd'], "bin"), os.pathsep,
|
||||
os.environ.get("PATH", os.environ.get("Path", "")))
|
||||
|
||||
self._transport = reactor.spawnProcess(
|
||||
self,
|
||||
server_executable, [server_executable] + server['arguments'],
|
||||
path=server['cwd'],
|
||||
env=env)
|
||||
if "mspdebug" in server_executable.lower():
|
||||
self._debug_port = ":2000"
|
||||
elif "jlink" in server_executable.lower():
|
||||
self._debug_port = ":2331"
|
||||
elif "qemu" in server_executable.lower():
|
||||
self._debug_port = ":1234"
|
||||
|
||||
return self._transport
|
||||
|
||||
def get_debug_port(self):
|
||||
return self._debug_port
|
||||
|
||||
def processEnded(self, reason):
|
||||
self._process_ended = True
|
||||
super(DebugServer, self).processEnded(reason)
|
||||
|
||||
def terminate(self):
|
||||
if self._process_ended or not self._transport:
|
||||
return
|
||||
try:
|
||||
self._transport.signalProcess("KILL")
|
||||
except (OSError, error.ProcessExitedAlready):
|
||||
pass
|
||||
@@ -12,14 +12,17 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from os import getcwd
|
||||
from os.path import join
|
||||
|
||||
import click
|
||||
from serial.tools import miniterm
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
@click.group(short_help="Monitor device or list existing")
|
||||
@@ -44,10 +47,11 @@ def device_list( # pylint: disable=too-many-branches
|
||||
if mdns:
|
||||
data['mdns'] = util.get_mdns_services()
|
||||
|
||||
single_key = data.keys()[0] if len(data.keys()) == 1 else None
|
||||
single_key = list(data)[0] if len(list(data)) == 1 else None
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(data[single_key] if single_key else data))
|
||||
return click.echo(
|
||||
dump_json_to_unicode(data[single_key] if single_key else data))
|
||||
|
||||
titles = {
|
||||
"serial": "Serial Ports",
|
||||
@@ -55,7 +59,7 @@ def device_list( # pylint: disable=too-many-branches
|
||||
"mdns": "Multicast DNS Services"
|
||||
}
|
||||
|
||||
for key, value in data.iteritems():
|
||||
for key, value in data.items():
|
||||
if not single_key:
|
||||
click.secho(titles[key], bold=True)
|
||||
click.echo("=" * len(titles[key]))
|
||||
@@ -85,7 +89,7 @@ def device_list( # pylint: disable=too-many-branches
|
||||
if item['properties']:
|
||||
click.echo("Properties: %s" % ("; ".join([
|
||||
"%s=%s" % (k, v)
|
||||
for k, v in item['properties'].iteritems()
|
||||
for k, v in item['properties'].items()
|
||||
])))
|
||||
click.echo("")
|
||||
|
||||
@@ -98,79 +102,74 @@ def device_list( # pylint: disable=too-many-branches
|
||||
@cli.command("monitor", short_help="Monitor device (Serial)")
|
||||
@click.option("--port", "-p", help="Port, a number or a device name")
|
||||
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
|
||||
@click.option(
|
||||
"--parity",
|
||||
default="N",
|
||||
type=click.Choice(["N", "E", "O", "S", "M"]),
|
||||
help="Set parity, default=N")
|
||||
@click.option(
|
||||
"--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
|
||||
@click.option(
|
||||
"--xonxoff",
|
||||
is_flag=True,
|
||||
help="Enable software flow control, default=Off")
|
||||
@click.option(
|
||||
"--rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state")
|
||||
@click.option(
|
||||
"--dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state")
|
||||
@click.option("--parity",
|
||||
default="N",
|
||||
type=click.Choice(["N", "E", "O", "S", "M"]),
|
||||
help="Set parity, default=N")
|
||||
@click.option("--rtscts",
|
||||
is_flag=True,
|
||||
help="Enable RTS/CTS flow control, default=Off")
|
||||
@click.option("--xonxoff",
|
||||
is_flag=True,
|
||||
help="Enable software flow control, default=Off")
|
||||
@click.option("--rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state")
|
||||
@click.option("--dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state")
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
|
||||
@click.option(
|
||||
"--encoding",
|
||||
default="UTF-8",
|
||||
help="Set the encoding for the serial port (e.g. hexlify, "
|
||||
"Latin1, UTF-8), default: UTF-8")
|
||||
@click.option("--encoding",
|
||||
default="UTF-8",
|
||||
help="Set the encoding for the serial port (e.g. hexlify, "
|
||||
"Latin1, UTF-8), default: UTF-8")
|
||||
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
|
||||
@click.option(
|
||||
"--eol",
|
||||
default="CRLF",
|
||||
type=click.Choice(["CR", "LF", "CRLF"]),
|
||||
help="End of line mode, default=CRLF")
|
||||
@click.option(
|
||||
"--raw", is_flag=True, help="Do not apply any encodings/transformations")
|
||||
@click.option(
|
||||
"--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application, default=3 (Ctrl+C)")
|
||||
@click.option(
|
||||
"--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control miniterm (menu), default=20 (DEC)")
|
||||
@click.option(
|
||||
"--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, resolve_path=True))
|
||||
@click.option("--eol",
|
||||
default="CRLF",
|
||||
type=click.Choice(["CR", "LF", "CRLF"]),
|
||||
help="End of line mode, default=CRLF")
|
||||
@click.option("--raw",
|
||||
is_flag=True,
|
||||
help="Do not apply any encodings/transformations")
|
||||
@click.option("--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application, default=3 (Ctrl+C)")
|
||||
@click.option("--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control miniterm (menu), default=20 (DEC)")
|
||||
@click.option("--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off")
|
||||
@click.option("-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
resolve_path=True))
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
help="Load configuration from `platformio.ini` and specified environment")
|
||||
def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
env_options = {}
|
||||
try:
|
||||
project_options = get_project_options(kwargs['project_dir'],
|
||||
kwargs['environment'])
|
||||
monitor_options = {k: v for k, v in project_options or []}
|
||||
if monitor_options:
|
||||
for k in ("port", "baud", "rts", "dtr"):
|
||||
k2 = "monitor_%s" % k
|
||||
if kwargs[k] is None and k2 in monitor_options:
|
||||
kwargs[k] = monitor_options[k2]
|
||||
if k != "port":
|
||||
kwargs[k] = int(kwargs[k])
|
||||
env_options = get_project_options(kwargs['project_dir'],
|
||||
kwargs['environment'])
|
||||
for k in ("port", "speed", "rts", "dtr"):
|
||||
k2 = "monitor_%s" % k
|
||||
if k == "speed":
|
||||
k = "baud"
|
||||
if kwargs[k] is None and k2 in env_options:
|
||||
kwargs[k] = env_options[k2]
|
||||
if k != "port":
|
||||
kwargs[k] = int(kwargs[k])
|
||||
except exception.NotPlatformIOProject:
|
||||
pass
|
||||
|
||||
@@ -179,11 +178,13 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
if len(ports) == 1:
|
||||
kwargs['port'] = ports[0]['port']
|
||||
|
||||
sys.argv = ["monitor"]
|
||||
for k, v in kwargs.iteritems():
|
||||
sys.argv = ["monitor"] + env_options.get("monitor_flags", [])
|
||||
for k, v in kwargs.items():
|
||||
if k in ("port", "baud", "rts", "dtr", "environment", "project_dir"):
|
||||
continue
|
||||
k = "--" + k.replace("_", "-")
|
||||
if k in env_options.get("monitor_flags", []):
|
||||
continue
|
||||
if isinstance(v, bool):
|
||||
if v:
|
||||
sys.argv.append(k)
|
||||
@@ -193,34 +194,28 @@ def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
else:
|
||||
sys.argv.extend([k, str(v)])
|
||||
|
||||
if kwargs['port'] and (set(["*", "?", "[", "]"]) & set(kwargs['port'])):
|
||||
for item in util.get_serial_ports():
|
||||
if fnmatch(item['port'], kwargs['port']):
|
||||
kwargs['port'] = item['port']
|
||||
break
|
||||
|
||||
try:
|
||||
miniterm.main(
|
||||
default_port=kwargs['port'],
|
||||
default_baudrate=kwargs['baud'] or 9600,
|
||||
default_rts=kwargs['rts'],
|
||||
default_dtr=kwargs['dtr'])
|
||||
miniterm.main(default_port=kwargs['port'],
|
||||
default_baudrate=kwargs['baud'] or 9600,
|
||||
default_rts=kwargs['rts'],
|
||||
default_dtr=kwargs['dtr'])
|
||||
except Exception as e:
|
||||
raise exception.MinitermException(e)
|
||||
|
||||
|
||||
def get_project_options(project_dir, environment):
|
||||
config = util.load_project_config(project_dir)
|
||||
if not config.sections():
|
||||
return None
|
||||
|
||||
known_envs = [s[4:] for s in config.sections() if s.startswith("env:")]
|
||||
if environment:
|
||||
if environment in known_envs:
|
||||
return config.items("env:%s" % environment)
|
||||
raise exception.UnknownEnvNames(environment, ", ".join(known_envs))
|
||||
|
||||
if not known_envs:
|
||||
return None
|
||||
|
||||
if config.has_option("platformio", "env_default"):
|
||||
env_default = config.get("platformio",
|
||||
"env_default").split(", ")[0].strip()
|
||||
if env_default and env_default in known_envs:
|
||||
return config.items("env:%s" % env_default)
|
||||
|
||||
return config.items("env:%s" % known_envs[0])
|
||||
def get_project_options(project_dir, environment=None):
|
||||
config = ProjectConfig.get_instance(join(project_dir, "platformio.ini"))
|
||||
config.validate(envs=[environment] if environment else None)
|
||||
if not environment:
|
||||
default_envs = config.default_envs()
|
||||
if default_envs:
|
||||
environment = default_envs[0]
|
||||
else:
|
||||
environment = config.envs()[0]
|
||||
return config.items(env=environment, as_dict=True)
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
|
||||
import click
|
||||
import requests
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.command("home", short_help="PIO Home")
|
||||
@click.option("--port", type=int, default=8008, help="HTTP port, default=8008")
|
||||
@click.option(
|
||||
"--host",
|
||||
default="127.0.0.1",
|
||||
help="HTTP host, default=127.0.0.1. "
|
||||
"You can open PIO Home for inbound connections with --host=0.0.0.0")
|
||||
@click.option("--no-open", is_flag=True)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
def shutdown_servers():
|
||||
port = 8010
|
||||
while port < 9000:
|
||||
try:
|
||||
requests.get("http://127.0.0.1:%d?__shutdown__=1" % port)
|
||||
port += 1
|
||||
except: # pylint: disable=bare-except
|
||||
return
|
||||
15
platformio/commands/home/__init__.py
Normal file
@@ -0,0 +1,15 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.commands.home.command import cli
109
platformio/commands/home/command.py
Normal file
@@ -0,0 +1,109 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import mimetypes
|
||||
import socket
|
||||
from os.path import isdir
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception
|
||||
from platformio.managers.core import (get_core_package_dir,
|
||||
inject_contrib_pysite)
|
||||
|
||||
|
||||
@click.command("home", short_help="PIO Home")
|
||||
@click.option("--port", type=int, default=8008, help="HTTP port, default=8008")
|
||||
@click.option(
|
||||
"--host",
|
||||
default="127.0.0.1",
|
||||
help="HTTP host, default=127.0.0.1. "
|
||||
"You can open PIO Home for inbound connections with --host=0.0.0.0")
|
||||
@click.option("--no-open", is_flag=True) # pylint: disable=too-many-locals
|
||||
def cli(port, host, no_open):
|
||||
# import contrib modules
|
||||
inject_contrib_pysite()
|
||||
# pylint: disable=import-error
|
||||
from autobahn.twisted.resource import WebSocketResource
|
||||
from twisted.internet import reactor
|
||||
from twisted.web import server
|
||||
# pylint: enable=import-error
|
||||
from platformio.commands.home.rpc.handlers.app import AppRPC
|
||||
from platformio.commands.home.rpc.handlers.ide import IDERPC
|
||||
from platformio.commands.home.rpc.handlers.misc import MiscRPC
|
||||
from platformio.commands.home.rpc.handlers.os import OSRPC
|
||||
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
|
||||
from platformio.commands.home.rpc.handlers.project import ProjectRPC
|
||||
from platformio.commands.home.rpc.server import JSONRPCServerFactory
|
||||
from platformio.commands.home.web import WebRoot
|
||||
|
||||
factory = JSONRPCServerFactory()
|
||||
factory.addHandler(AppRPC(), namespace="app")
|
||||
factory.addHandler(IDERPC(), namespace="ide")
|
||||
factory.addHandler(MiscRPC(), namespace="misc")
|
||||
factory.addHandler(OSRPC(), namespace="os")
|
||||
factory.addHandler(PIOCoreRPC(), namespace="core")
|
||||
factory.addHandler(ProjectRPC(), namespace="project")
|
||||
|
||||
contrib_dir = get_core_package_dir("contrib-piohome")
|
||||
if not isdir(contrib_dir):
|
||||
raise exception.PlatformioException("Invalid path to PIO Home Contrib")
|
||||
|
||||
# Ensure PIO Home mimetypes are known
|
||||
mimetypes.add_type("text/html", ".html")
|
||||
mimetypes.add_type("text/css", ".css")
|
||||
mimetypes.add_type("application/javascript", ".js")
|
||||
|
||||
root = WebRoot(contrib_dir)
|
||||
root.putChild(b"wsrpc", WebSocketResource(factory))
|
||||
site = server.Site(root)
|
||||
|
||||
# hook for `platformio-node-helpers`
|
||||
if host == "__do_not_start__":
|
||||
return
|
||||
|
||||
# if already started
|
||||
already_started = False
|
||||
socket.setdefaulttimeout(1)
|
||||
try:
|
||||
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
|
||||
already_started = True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
home_url = "http://%s:%d" % (host, port)
|
||||
if not no_open:
|
||||
if already_started:
|
||||
click.launch(home_url)
|
||||
else:
|
||||
reactor.callLater(1, lambda: click.launch(home_url))
|
||||
|
||||
click.echo("\n".join([
|
||||
"",
|
||||
" ___I_",
|
||||
" /\\-_--\\ PlatformIO Home",
|
||||
"/ \\_-__\\",
|
||||
"|[]| [] | %s" % home_url,
|
||||
"|__|____|______________%s" % ("_" * len(host)),
|
||||
]))
|
||||
click.echo("")
|
||||
click.echo("Open PIO Home in your browser by this URL => %s" % home_url)
|
||||
|
||||
if already_started:
|
||||
return
|
||||
|
||||
click.echo("PIO Home has been started. Press Ctrl+C to shutdown.")
|
||||
|
||||
reactor.listenTCP(port, site, interface=host)
|
||||
reactor.run()
|
||||
71
platformio/commands/home/helpers.py
Normal file
@@ -0,0 +1,71 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=keyword-arg-before-vararg, arguments-differ
|
||||
|
||||
import os
|
||||
import socket
|
||||
|
||||
import requests
|
||||
from twisted.internet import defer # pylint: disable=import-error
|
||||
from twisted.internet import reactor # pylint: disable=import-error
|
||||
from twisted.internet import threads # pylint: disable=import-error
|
||||
|
||||
from platformio import util
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class AsyncSession(requests.Session):
|
||||
|
||||
def __init__(self, n=None, *args, **kwargs):
|
||||
if n:
|
||||
pool = reactor.getThreadPool()
|
||||
pool.adjustPoolsize(0, n)
|
||||
|
||||
super(AsyncSession, self).__init__(*args, **kwargs)
|
||||
|
||||
def request(self, *args, **kwargs):
|
||||
func = super(AsyncSession, self).request
|
||||
return threads.deferToThread(func, *args, **kwargs)
|
||||
|
||||
def wrap(self, *args, **kwargs): # pylint: disable=no-self-use
|
||||
return defer.ensureDeferred(*args, **kwargs)
|
||||
|
||||
|
||||
@util.memoized(expire="60s")
|
||||
def requests_session():
|
||||
return AsyncSession(n=5)
|
||||
|
||||
|
||||
@util.memoized(expire="60s")
|
||||
def get_core_fullpath():
|
||||
return where_is_program(
|
||||
"platformio" + (".exe" if "windows" in util.get_systype() else ""))
|
||||
|
||||
|
||||
@util.memoized(expire="10s")
|
||||
def is_twitter_blocked():
|
||||
ip = "104.244.42.1"
|
||||
timeout = 2
|
||||
try:
|
||||
if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
|
||||
requests.get("http://%s" % ip,
|
||||
allow_redirects=False,
|
||||
timeout=timeout)
|
||||
else:
|
||||
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((ip, 80))
|
||||
return False
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return True
|
||||
13
platformio/commands/home/rpc/__init__.py
Normal file
@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
13
platformio/commands/home/rpc/handlers/__init__.py
Normal file
@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
81
platformio/commands/home/rpc/handlers/app.py
Normal file
@@ -0,0 +1,81 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from os.path import expanduser, join
|
||||
|
||||
from platformio import __version__, app, util
|
||||
from platformio.project.helpers import (get_project_core_dir,
|
||||
is_platformio_project)
|
||||
|
||||
|
||||
class AppRPC(object):
|
||||
|
||||
APPSTATE_PATH = join(get_project_core_dir(), "homestate.json")
|
||||
|
||||
IGNORE_STORAGE_KEYS = [
|
||||
"cid", "coreVersion", "coreSystype", "coreCaller", "coreSettings",
|
||||
"homeDir", "projectsDir"
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def load_state():
|
||||
with app.State(AppRPC.APPSTATE_PATH, lock=True) as state:
|
||||
storage = state.get("storage", {})
|
||||
|
||||
# base data
|
||||
caller_id = app.get_session_var("caller_id")
|
||||
storage['cid'] = app.get_cid()
|
||||
storage['coreVersion'] = __version__
|
||||
storage['coreSystype'] = util.get_systype()
|
||||
storage['coreCaller'] = (str(caller_id).lower()
|
||||
if caller_id else None)
|
||||
storage['coreSettings'] = {
|
||||
name: {
|
||||
"description": data['description'],
|
||||
"default_value": data['value'],
|
||||
"value": app.get_setting(name)
|
||||
}
|
||||
for name, data in app.DEFAULT_SETTINGS.items()
|
||||
}
|
||||
|
||||
storage['homeDir'] = expanduser("~")
|
||||
storage['projectsDir'] = storage['coreSettings']['projects_dir'][
|
||||
'value']
|
||||
|
||||
# skip non-existing recent projects
|
||||
storage['recentProjects'] = [
|
||||
p for p in storage.get("recentProjects", [])
|
||||
if is_platformio_project(p)
|
||||
]
|
||||
|
||||
state['storage'] = storage
|
||||
state.modified = False # skip saving extra fields
|
||||
return state.as_dict()
|
||||
|
||||
@staticmethod
|
||||
def get_state():
|
||||
return AppRPC.load_state()
|
||||
|
||||
@staticmethod
|
||||
def save_state(state):
|
||||
with app.State(AppRPC.APPSTATE_PATH, lock=True) as s:
|
||||
s.clear()
|
||||
s.update(state)
|
||||
storage = s.get("storage", {})
|
||||
for k in AppRPC.IGNORE_STORAGE_KEYS:
|
||||
if k in storage:
|
||||
del storage[k]
|
||||
return True
|
||||
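For orientation, a minimal sketch (not part of the commit) of the state round trip that AppRPC implements: load_state() injects runtime keys such as "coreVersion", and save_state() strips everything listed in IGNORE_STORAGE_KEYS before writing, so only user-editable data persists in homestate.json. The "theme" key below is a hypothetical example.

```python
from platformio.commands.home.rpc.handlers.app import AppRPC

state = AppRPC.load_state()
print(state['storage']['coreVersion'])  # injected at load time, never saved

state['storage']['theme'] = "dark"      # hypothetical frontend preference
AppRPC.save_state(state)                # runtime keys are dropped on save
```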
44 platformio/commands/home/rpc/handlers/ide.py Normal file
@@ -0,0 +1,44 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time

import jsonrpc  # pylint: disable=import-error
from twisted.internet import defer  # pylint: disable=import-error


class IDERPC(object):

    def __init__(self):
        self._queue = {}

    def send_command(self, command, params, sid=0):
        if not self._queue.get(sid):
            raise jsonrpc.exceptions.JSONRPCDispatchException(
                code=4005, message="PIO Home IDE agent is not started")
        while self._queue[sid]:
            self._queue[sid].pop().callback({
                "id": time.time(),
                "method": command,
                "params": params
            })

    def listen_commands(self, sid=0):
        if sid not in self._queue:
            self._queue[sid] = []
        self._queue[sid].append(defer.Deferred())
        return self._queue[sid][-1]

    def open_project(self, project_dir, sid=0):
        return self.send_command("open_project", project_dir, sid)
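A minimal sketch (not part of the commit) of the deferred hand-off this handler uses: an IDE agent parks a Deferred via listen_commands(), and a later send_command() pops that Deferred and fires it with the command payload. The project path is a placeholder.

```python
from platformio.commands.home.rpc.handlers.ide import IDERPC

ide = IDERPC()
d = ide.listen_commands()  # parks a Deferred for session id 0
d.addCallback(lambda cmd: print(cmd["method"], cmd["params"]))

# later, e.g. triggered by another RPC client:
ide.open_project("/path/to/project")  # fires the parked Deferred
```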
54 platformio/commands/home/rpc/handlers/misc.py Normal file
@@ -0,0 +1,54 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import time

from twisted.internet import defer, reactor  # pylint: disable=import-error

from platformio import app
from platformio.commands.home.rpc.handlers.os import OSRPC


class MiscRPC(object):

    def load_latest_tweets(self, username):
        cache_key = "piohome_latest_tweets_" + str(username)
        cache_valid = "7d"
        with app.ContentCache() as cc:
            cache_data = cc.get(cache_key)
            if cache_data:
                cache_data = json.loads(cache_data)
                # automatically update cache in background every 12 hours
                if cache_data['time'] < (time.time() - (3600 * 12)):
                    reactor.callLater(5, self._preload_latest_tweets, username,
                                      cache_key, cache_valid)
                return cache_data['result']

        result = self._preload_latest_tweets(username, cache_key, cache_valid)
        return result

    @staticmethod
    @defer.inlineCallbacks
    def _preload_latest_tweets(username, cache_key, cache_valid):
        result = yield OSRPC.fetch_content(
            "https://api.platformio.org/tweets/" + username)
        result = json.loads(result)
        with app.ContentCache() as cc:
            cc.set(cache_key,
                   json.dumps({
                       "time": int(time.time()),
                       "result": result
                   }), cache_valid)
        defer.returnValue(result)
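A minimal sketch (not part of the commit) of the refresh policy encoded above: cached entries stay valid for 7 days, but anything older than 12 hours is still served immediately and then refreshed in the background via reactor.callLater().

```python
import time

TWELVE_HOURS = 3600 * 12

def is_stale(cached_at, now=None):
    """True when the cached payload should be refreshed in the background."""
    now = now or time.time()
    return cached_at < now - TWELVE_HOURS

print(is_stale(time.time() - 13 * 3600))  # True  -> schedule a background reload
print(is_stale(time.time() - 1 * 3600))   # False -> serve the cache as-is
```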
152 platformio/commands/home/rpc/handlers/os.py Normal file
@@ -0,0 +1,152 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import codecs
import glob
import os
import shutil
from functools import cmp_to_key
from os.path import expanduser, isdir, isfile, join

import click
from twisted.internet import defer  # pylint: disable=import-error

from platformio import app, util
from platformio.commands.home import helpers
from platformio.compat import PY2, get_filesystem_encoding


class OSRPC(object):

    @staticmethod
    @defer.inlineCallbacks
    def fetch_content(uri, data=None, headers=None, cache_valid=None):
        if not headers:
            headers = {
                "User-Agent":
                ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) "
                 "AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.1.2 "
                 "Safari/603.3.8")
            }
        cache_key = (app.ContentCache.key_from_args(uri, data)
                     if cache_valid else None)
        with app.ContentCache() as cc:
            if cache_key:
                result = cc.get(cache_key)
                if result is not None:
                    defer.returnValue(result)

        # check internet before and resolve issue with 60 seconds timeout
        util.internet_on(raise_exception=True)

        session = helpers.requests_session()
        if data:
            r = yield session.post(uri, data=data, headers=headers)
        else:
            r = yield session.get(uri, headers=headers)

        r.raise_for_status()
        result = r.text
        if cache_valid:
            with app.ContentCache() as cc:
                cc.set(cache_key, result, cache_valid)
        defer.returnValue(result)

    def request_content(self, uri, data=None, headers=None, cache_valid=None):
        if uri.startswith('http'):
            return self.fetch_content(uri, data, headers, cache_valid)
        if not isfile(uri):
            return None
        with codecs.open(uri, encoding="utf-8") as fp:
            return fp.read()

    @staticmethod
    def open_url(url):
        return click.launch(url)

    @staticmethod
    def reveal_file(path):
        return click.launch(
            path.encode(get_filesystem_encoding()) if PY2 else path,
            locate=True)

    @staticmethod
    def is_file(path):
        return isfile(path)

    @staticmethod
    def is_dir(path):
        return isdir(path)

    @staticmethod
    def make_dirs(path):
        return os.makedirs(path)

    @staticmethod
    def rename(src, dst):
        return os.rename(src, dst)

    @staticmethod
    def copy(src, dst):
        return shutil.copytree(src, dst)

    @staticmethod
    def glob(pathnames, root=None):
        if not isinstance(pathnames, list):
            pathnames = [pathnames]
        result = set()
        for pathname in pathnames:
            result |= set(
                glob.glob(join(root, pathname) if root else pathname))
        return list(result)

    @staticmethod
    def list_dir(path):

        def _cmp(x, y):
            if x[1] and not y[1]:
                return -1
            if not x[1] and y[1]:
                return 1
            if x[0].lower() > y[0].lower():
                return 1
            if x[0].lower() < y[0].lower():
                return -1
            return 0

        items = []
        if path.startswith("~"):
            path = expanduser(path)
        if not isdir(path):
            return items
        for item in os.listdir(path):
            try:
                item_is_dir = isdir(join(path, item))
                if item_is_dir:
                    os.listdir(join(path, item))
                items.append((item, item_is_dir))
            except OSError:
                pass
        return sorted(items, key=cmp_to_key(_cmp))

    @staticmethod
    def get_logical_devices():
        items = []
        for item in util.get_logical_devices():
            if item['name']:
                item['name'] = item['name']
            items.append(item)
        return items
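A minimal sketch (not part of the commit) of the filesystem helpers this handler exposes to the PIO Home frontend; the paths and patterns are placeholders only.

```python
from platformio.commands.home.rpc.handlers.os import OSRPC

# union of several glob patterns, resolved relative to an optional root
sources = OSRPC.glob(["src/*.cpp", "lib/*/*.h"], root="/tmp/demo-project")
print(len(sources), "matching files")

# directories sort first, then case-insensitive alphabetical order;
# unreadable entries are silently skipped
for name, is_dir in OSRPC.list_dir("~/Documents"):
    print(("[dir] " if is_dir else "      ") + name)
```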
162 platformio/commands/home/rpc/handlers/piocore.py Normal file
@@ -0,0 +1,162 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import json
import os
import sys
from io import BytesIO, StringIO

import click
import jsonrpc  # pylint: disable=import-error
from twisted.internet import defer  # pylint: disable=import-error
from twisted.internet import threads  # pylint: disable=import-error
from twisted.internet import utils  # pylint: disable=import-error

from platformio import __main__, __version__, fs
from platformio.commands.home import helpers
from platformio.compat import (PY2, get_filesystem_encoding, is_bytes,
                               string_types)

try:
    from thread import get_ident as thread_get_ident
except ImportError:
    from threading import get_ident as thread_get_ident


class MultiThreadingStdStream(object):

    def __init__(self, parent_stream):
        self._buffers = {thread_get_ident(): parent_stream}

    def __getattr__(self, name):
        thread_id = thread_get_ident()
        self._ensure_thread_buffer(thread_id)
        return getattr(self._buffers[thread_id], name)

    def _ensure_thread_buffer(self, thread_id):
        if thread_id not in self._buffers:
            self._buffers[thread_id] = BytesIO() if PY2 else StringIO()

    def write(self, value):
        thread_id = thread_get_ident()
        self._ensure_thread_buffer(thread_id)
        return self._buffers[thread_id].write(
            value.decode() if is_bytes(value) else value)

    def get_value_and_reset(self):
        result = ""
        try:
            result = self.getvalue()
            self.truncate(0)
            self.seek(0)
        except AttributeError:
            pass
        return result


class PIOCoreRPC(object):

    @staticmethod
    def version():
        return __version__

    @staticmethod
    def setup_multithreading_std_streams():
        if isinstance(sys.stdout, MultiThreadingStdStream):
            return
        PIOCoreRPC.thread_stdout = MultiThreadingStdStream(sys.stdout)
        PIOCoreRPC.thread_stderr = MultiThreadingStdStream(sys.stderr)
        sys.stdout = PIOCoreRPC.thread_stdout
        sys.stderr = PIOCoreRPC.thread_stderr

    @staticmethod
    def call(args, options=None):
        return defer.maybeDeferred(PIOCoreRPC._call_generator, args, options)

    @staticmethod
    @defer.inlineCallbacks
    def _call_generator(args, options=None):
        for i, arg in enumerate(args):
            if isinstance(arg, string_types):
                args[i] = arg.encode(get_filesystem_encoding()) if PY2 else arg
            else:
                args[i] = str(arg)

        to_json = "--json-output" in args

        try:
            if args and args[0] in ("account", "remote"):
                result = yield PIOCoreRPC._call_subprocess(args, options)
                defer.returnValue(PIOCoreRPC._process_result(result, to_json))
            else:
                result = yield PIOCoreRPC._call_inline(args, options)
                try:
                    defer.returnValue(
                        PIOCoreRPC._process_result(result, to_json))
                except ValueError:
                    # fall-back to subprocess method
                    result = yield PIOCoreRPC._call_subprocess(args, options)
                    defer.returnValue(
                        PIOCoreRPC._process_result(result, to_json))
        except Exception as e:  # pylint: disable=bare-except
            raise jsonrpc.exceptions.JSONRPCDispatchException(
                code=4003, message="PIO Core Call Error", data=str(e))

    @staticmethod
    def _call_inline(args, options):
        PIOCoreRPC.setup_multithreading_std_streams()
        cwd = (options or {}).get("cwd") or os.getcwd()

        def _thread_task():
            with fs.cd(cwd):
                exit_code = __main__.main(["-c"] + args)
            return (PIOCoreRPC.thread_stdout.get_value_and_reset(),
                    PIOCoreRPC.thread_stderr.get_value_and_reset(), exit_code)

        return threads.deferToThread(_thread_task)

    @staticmethod
    def _call_subprocess(args, options):
        cwd = (options or {}).get("cwd") or os.getcwd()
        return utils.getProcessOutputAndValue(
            helpers.get_core_fullpath(),
            args,
            path=cwd,
            env={k: v
                 for k, v in os.environ.items() if "%" not in k})

    @staticmethod
    def _process_result(result, to_json=False):
        out, err, code = result
        text = ("%s\n\n%s" % (out, err)).strip()
        if code != 0:
            raise Exception(text)
        if not to_json:
            return text
        try:
            return json.loads(out)
        except ValueError as e:
            click.secho("%s => `%s`" % (e, out), fg="red", err=True)
            # if PIO Core prints unhandled warnings
            for line in out.split("\n"):
                line = line.strip()
                if not line:
                    continue
                try:
                    return json.loads(line)
                except ValueError:
                    pass
            raise e
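A minimal sketch (not part of the commit) of driving PIO Core through this handler: PIOCoreRPC.call() returns a Twisted Deferred, and passing "--json-output" makes _process_result() hand back parsed JSON instead of raw text. The working directory is a placeholder.

```python
from twisted.internet import reactor

from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC

d = PIOCoreRPC.call(["platform", "list", "--json-output"],
                    options={"cwd": "/tmp/demo-project"})
d.addCallback(lambda platforms: print(len(platforms), "platforms installed"))
d.addErrback(lambda failure: print("PIO Core call failed:", failure))
d.addBoth(lambda _: reactor.stop())

reactor.run()
```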
277 platformio/commands/home/rpc/handlers/project.py Normal file
@@ -0,0 +1,277 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

import os
import shutil
import time
from os.path import (basename, expanduser, getmtime, isdir, isfile, join,
                     realpath, sep)

import jsonrpc  # pylint: disable=import-error

from platformio import exception, fs
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
from platformio.compat import PY2, get_filesystem_encoding
from platformio.ide.projectgenerator import ProjectGenerator
from platformio.managers.platform import PlatformManager
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (get_project_libdeps_dir,
                                        get_project_src_dir,
                                        is_platformio_project)


class ProjectRPC(object):

    @staticmethod
    def _get_projects(project_dirs=None):

        def _get_project_data(project_dir):
            data = {"boards": [], "envLibdepsDirs": [], "libExtraDirs": []}
            config = ProjectConfig(join(project_dir, "platformio.ini"))
            libdeps_dir = get_project_libdeps_dir()

            data['libExtraDirs'].extend(
                config.get("platformio", "lib_extra_dirs", []))

            for section in config.sections():
                if not section.startswith("env:"):
                    continue
                data['envLibdepsDirs'].append(join(libdeps_dir, section[4:]))
                if config.has_option(section, "board"):
                    data['boards'].append(config.get(section, "board"))
                data['libExtraDirs'].extend(
                    config.get(section, "lib_extra_dirs", []))

            # skip non existing folders and resolve full path
            for key in ("envLibdepsDirs", "libExtraDirs"):
                data[key] = [
                    expanduser(d) if d.startswith("~") else realpath(d)
                    for d in data[key] if isdir(d)
                ]

            return data

        def _path_to_name(path):
            return (sep).join(path.split(sep)[-2:])

        if not project_dirs:
            project_dirs = AppRPC.load_state()['storage']['recentProjects']

        result = []
        pm = PlatformManager()
        for project_dir in project_dirs:
            data = {}
            boards = []
            try:
                with fs.cd(project_dir):
                    data = _get_project_data(project_dir)
            except exception.PlatformIOProjectException:
                continue

            for board_id in data.get("boards", []):
                name = board_id
                try:
                    name = pm.board_config(board_id)['name']
                except exception.PlatformioException:
                    pass
                boards.append({"id": board_id, "name": name})

            result.append({
                "path":
                project_dir,
                "name":
                _path_to_name(project_dir),
                "modified":
                int(getmtime(project_dir)),
                "boards":
                boards,
                "envLibStorages": [{
                    "name": basename(d),
                    "path": d
                } for d in data.get("envLibdepsDirs", [])],
                "extraLibStorages": [{
                    "name": _path_to_name(d),
                    "path": d
                } for d in data.get("libExtraDirs", [])]
            })
        return result

    def get_projects(self, project_dirs=None):
        return self._get_projects(project_dirs)

    @staticmethod
    def get_project_examples():
        result = []
        for manifest in PlatformManager().get_installed():
            examples_dir = join(manifest['__pkg_dir'], "examples")
            if not isdir(examples_dir):
                continue
            items = []
            for project_dir, _, __ in os.walk(examples_dir):
                project_description = None
                try:
                    config = ProjectConfig(join(project_dir, "platformio.ini"))
                    config.validate(silent=True)
                    project_description = config.get("platformio",
                                                     "description")
                except exception.PlatformIOProjectException:
                    continue

                path_tokens = project_dir.split(sep)
                items.append({
                    "name":
                    "/".join(path_tokens[path_tokens.index("examples") + 1:]),
                    "path":
                    project_dir,
                    "description":
                    project_description
                })
            result.append({
                "platform": {
                    "title": manifest['title'],
                    "version": manifest['version']
                },
                "items": sorted(items, key=lambda item: item['name'])
            })
        return sorted(result, key=lambda data: data['platform']['title'])

    def init(self, board, framework, project_dir):
        assert project_dir
        state = AppRPC.load_state()
        if not isdir(project_dir):
            os.makedirs(project_dir)
        args = ["init", "--board", board]
        if framework:
            args.extend(["--project-option", "framework = %s" % framework])
        if (state['storage']['coreCaller'] and state['storage']['coreCaller']
                in ProjectGenerator.get_supported_ides()):
            args.extend(["--ide", state['storage']['coreCaller']])
        d = PIOCoreRPC.call(args, options={"cwd": project_dir})
        d.addCallback(self._generate_project_main, project_dir, framework)
        return d

    @staticmethod
    def _generate_project_main(_, project_dir, framework):
        main_content = None
        if framework == "arduino":
            main_content = "\n".join([
                "#include <Arduino.h>",
                "",
                "void setup() {",
                "  // put your setup code here, to run once:",
                "}",
                "",
                "void loop() {",
                "  // put your main code here, to run repeatedly:",
                "}"
                ""
            ])  # yapf: disable
        elif framework == "mbed":
            main_content = "\n".join([
                "#include <mbed.h>",
                "",
                "int main() {",
                "",
                "  // put your setup code here, to run once:",
                "",
                "  while(1) {",
                "    // put your main code here, to run repeatedly:",
                "  }",
                "}",
                ""
            ])  # yapf: disable
        if not main_content:
            return project_dir
        with fs.cd(project_dir):
            src_dir = get_project_src_dir()
            main_path = join(src_dir, "main.cpp")
            if isfile(main_path):
                return project_dir
            if not isdir(src_dir):
                os.makedirs(src_dir)
            with open(main_path, "w") as f:
                f.write(main_content.strip())
        return project_dir

    def import_arduino(self, board, use_arduino_libs, arduino_project_dir):
        board = str(board)
        if arduino_project_dir and PY2:
            arduino_project_dir = arduino_project_dir.encode(
                get_filesystem_encoding())
        # don't import PIO Project
        if is_platformio_project(arduino_project_dir):
            return arduino_project_dir

        is_arduino_project = any([
            isfile(
                join(arduino_project_dir,
                     "%s.%s" % (basename(arduino_project_dir), ext)))
            for ext in ("ino", "pde")
        ])
        if not is_arduino_project:
            raise jsonrpc.exceptions.JSONRPCDispatchException(
                code=4000,
                message="Not an Arduino project: %s" % arduino_project_dir)

        state = AppRPC.load_state()
        project_dir = join(state['storage']['projectsDir'],
                           time.strftime("%y%m%d-%H%M%S-") + board)
        if not isdir(project_dir):
            os.makedirs(project_dir)
        args = ["init", "--board", board]
        args.extend(["--project-option", "framework = arduino"])
        if use_arduino_libs:
            args.extend([
                "--project-option",
                "lib_extra_dirs = ~/Documents/Arduino/libraries"
            ])
        if (state['storage']['coreCaller'] and state['storage']['coreCaller']
                in ProjectGenerator.get_supported_ides()):
            args.extend(["--ide", state['storage']['coreCaller']])
        d = PIOCoreRPC.call(args, options={"cwd": project_dir})
        d.addCallback(self._finalize_arduino_import, project_dir,
                      arduino_project_dir)
        return d

    @staticmethod
    def _finalize_arduino_import(_, project_dir, arduino_project_dir):
        with fs.cd(project_dir):
            src_dir = get_project_src_dir()
            if isdir(src_dir):
                fs.rmtree(src_dir)
            shutil.copytree(arduino_project_dir, src_dir)
        return project_dir

    @staticmethod
    def import_pio(project_dir):
        if not project_dir or not is_platformio_project(project_dir):
            raise jsonrpc.exceptions.JSONRPCDispatchException(
                code=4001,
                message="Not an PlatformIO project: %s" % project_dir)
        new_project_dir = join(
            AppRPC.load_state()['storage']['projectsDir'],
            time.strftime("%y%m%d-%H%M%S-") + basename(project_dir))
        shutil.copytree(project_dir, new_project_dir)

        state = AppRPC.load_state()
        args = ["init"]
        if (state['storage']['coreCaller'] and state['storage']['coreCaller']
                in ProjectGenerator.get_supported_ides()):
            args.extend(["--ide", state['storage']['coreCaller']])
        d = PIOCoreRPC.call(args, options={"cwd": new_project_dir})
        d.addCallback(lambda _: new_project_dir)
        return d
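A minimal sketch (not part of the commit) of the project bootstrap flow: ProjectRPC.init() runs "pio init" through PIOCoreRPC and then chains _generate_project_main(), which drops a framework-specific src/main.cpp unless one already exists. The board id and path below are placeholders.

```python
from twisted.internet import reactor

from platformio.commands.home.rpc.handlers.project import ProjectRPC

rpc = ProjectRPC()
d = rpc.init(board="uno", framework="arduino", project_dir="/tmp/demo-project")
d.addCallback(lambda project_dir: print("project ready:", project_dir))
d.addBoth(lambda _: reactor.stop())
reactor.run()
```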
77 platformio/commands/home/rpc/server.py Normal file
@@ -0,0 +1,77 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=import-error

import click
import jsonrpc
from autobahn.twisted.websocket import (WebSocketServerFactory,
                                        WebSocketServerProtocol)
from jsonrpc.exceptions import JSONRPCDispatchException
from twisted.internet import defer

from platformio.compat import PY2, dump_json_to_unicode, is_bytes


class JSONRPCServerProtocol(WebSocketServerProtocol):

    def onMessage(self, payload, isBinary):  # pylint: disable=unused-argument
        # click.echo("> %s" % payload)
        response = jsonrpc.JSONRPCResponseManager.handle(
            payload, self.factory.dispatcher).data
        # if error
        if "result" not in response:
            self.sendJSONResponse(response)
            return None

        d = defer.maybeDeferred(lambda: response['result'])
        d.addCallback(self._callback, response)
        d.addErrback(self._errback, response)

        return None

    def _callback(self, result, response):
        response['result'] = result
        self.sendJSONResponse(response)

    def _errback(self, failure, response):
        if isinstance(failure.value, JSONRPCDispatchException):
            e = failure.value
        else:
            e = JSONRPCDispatchException(code=4999,
                                         message=failure.getErrorMessage())
        del response["result"]
        response['error'] = e.error._data  # pylint: disable=protected-access
        self.sendJSONResponse(response)

    def sendJSONResponse(self, response):
        # click.echo("< %s" % response)
        if "error" in response:
            click.secho("Error: %s" % response['error'], fg="red", err=True)
        response = dump_json_to_unicode(response)
        if not PY2 and not is_bytes(response):
            response = response.encode("utf-8")
        self.sendMessage(response)


class JSONRPCServerFactory(WebSocketServerFactory):

    protocol = JSONRPCServerProtocol

    def __init__(self):
        super(JSONRPCServerFactory, self).__init__()
        self.dispatcher = jsonrpc.Dispatcher()

    def addHandler(self, handler, namespace):
        self.dispatcher.build_method_map(handler, prefix="%s." % namespace)
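A minimal sketch (not part of the commit) of how the handlers plug into this factory: addHandler() registers every public method of a handler under a namespace prefix, so the frontend can call methods such as "app.get_state" or "project.init" over the WebSocket. The actual wiring lives in the `pio home` command, which is not shown in this diff.

```python
from platformio.commands.home.rpc.handlers.app import AppRPC
from platformio.commands.home.rpc.handlers.project import ProjectRPC
from platformio.commands.home.rpc.server import JSONRPCServerFactory

factory = JSONRPCServerFactory()
factory.addHandler(AppRPC(), namespace="app")          # -> "app.get_state", ...
factory.addHandler(ProjectRPC(), namespace="project")  # -> "project.init", ...
```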
30 platformio/commands/home/web.py Normal file
@@ -0,0 +1,30 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from twisted.internet import reactor  # pylint: disable=import-error
from twisted.web import static  # pylint: disable=import-error


class WebRoot(static.File):

    def render_GET(self, request):
        if request.args.get("__shutdown__", False):
            reactor.stop()
            return "Server has been stopped"

        request.setHeader("cache-control",
                          "no-cache, no-store, must-revalidate")
        request.setHeader("pragma", "no-cache")
        request.setHeader("expires", "0")
        return static.File.render_GET(self, request)
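A minimal sketch (not part of the commit) of mounting WebRoot with twisted.web; the dist path and port are placeholders, and the real server setup for PIO Home is outside this diff. A GET request carrying "__shutdown__" in the query string stops the reactor; everything else is served with no-cache headers.

```python
from twisted.internet import reactor
from twisted.web import server

from platformio.commands.home.web import WebRoot

site = server.Site(WebRoot("/path/to/piohome/dist"))
reactor.listenTCP(8008, site)
reactor.run()
```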
@@ -16,15 +16,20 @@
|
||||
|
||||
from os import getcwd, makedirs
|
||||
from os.path import isdir, isfile, join
|
||||
from shutil import copyfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio import exception, fs
|
||||
from platformio.commands.platform import \
|
||||
platform_install as cli_platform_install
|
||||
from platformio.ide.projectgenerator import ProjectGenerator
|
||||
from platformio.managers.platform import PlatformManager
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import (get_project_include_dir,
|
||||
get_project_lib_dir,
|
||||
get_project_src_dir,
|
||||
get_project_test_dir,
|
||||
is_platformio_project)
|
||||
|
||||
|
||||
def validate_boards(ctx, param, value): # pylint: disable=W0613
|
||||
@@ -39,22 +44,23 @@ def validate_boards(ctx, param, value): # pylint: disable=W0613
|
||||
return value
|
||||
|
||||
|
||||
@click.command(
|
||||
"init", short_help="Initialize PlatformIO project or update existing")
|
||||
@click.option(
|
||||
"--project-dir",
|
||||
"-d",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option(
|
||||
"-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
|
||||
@click.option(
|
||||
"--ide", type=click.Choice(ProjectGenerator.get_supported_ides()))
|
||||
@click.command("init",
|
||||
short_help="Initialize PlatformIO project or update existing")
|
||||
@click.option("--project-dir",
|
||||
"-d",
|
||||
default=getcwd,
|
||||
type=click.Path(exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-b",
|
||||
"--board",
|
||||
multiple=True,
|
||||
metavar="ID",
|
||||
callback=validate_boards)
|
||||
@click.option("--ide",
|
||||
type=click.Choice(ProjectGenerator.get_supported_ides()))
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("--env-prefix", default="")
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@@ -67,143 +73,199 @@ def cli(
|
||||
project_option,
|
||||
env_prefix,
|
||||
silent):
|
||||
|
||||
if not silent:
|
||||
if project_dir == getcwd():
|
||||
click.secho(
|
||||
"\nThe current working directory", fg="yellow", nl=False)
|
||||
click.secho("\nThe current working directory",
|
||||
fg="yellow",
|
||||
nl=False)
|
||||
click.secho(" %s " % project_dir, fg="cyan", nl=False)
|
||||
click.secho(
|
||||
"will be used for project.\n"
|
||||
"You can specify another project directory via\n"
|
||||
"`platformio init -d %PATH_TO_THE_PROJECT_DIR%` command.",
|
||||
fg="yellow")
|
||||
click.secho("will be used for the project.", fg="yellow")
|
||||
click.echo("")
|
||||
|
||||
click.echo("The next files/directories have been created in %s" %
|
||||
click.style(project_dir, fg="cyan"))
|
||||
click.echo("%s - Project Configuration File" % click.style(
|
||||
"platformio.ini", fg="cyan"))
|
||||
click.echo(
|
||||
"%s - Put your source files here" % click.style("src", fg="cyan"))
|
||||
click.echo("%s - Put project header files here" %
|
||||
click.style("include", fg="cyan"))
|
||||
click.echo("%s - Put here project specific (private) libraries" %
|
||||
click.style("lib", fg="cyan"))
|
||||
click.echo("%s - Put project source files here" %
|
||||
click.style("src", fg="cyan"))
|
||||
click.echo("%s - Project Configuration File" %
|
||||
click.style("platformio.ini", fg="cyan"))
|
||||
|
||||
init_base_project(project_dir)
|
||||
is_new_project = not is_platformio_project(project_dir)
|
||||
if is_new_project:
|
||||
init_base_project(project_dir)
|
||||
|
||||
if board:
|
||||
fill_project_envs(ctx, project_dir, board, project_option, env_prefix,
|
||||
ide is not None)
|
||||
|
||||
if ide:
|
||||
env_name = get_best_envname(project_dir, board)
|
||||
if not env_name:
|
||||
raise exception.BoardNotDefined()
|
||||
pg = ProjectGenerator(project_dir, ide, env_name)
|
||||
pg = ProjectGenerator(project_dir, ide, board)
|
||||
pg.generate()
|
||||
|
||||
if not silent:
|
||||
if is_new_project:
|
||||
init_ci_conf(project_dir)
|
||||
init_cvs_ignore(project_dir)
|
||||
|
||||
if silent:
|
||||
return
|
||||
|
||||
if ide:
|
||||
click.secho(
|
||||
"\nProject has been successfully initialized!\nUseful commands:\n"
|
||||
"`platformio run` - process/build project from the current "
|
||||
"directory\n"
|
||||
"`platformio run --target upload` or `platformio run -t upload` "
|
||||
"- upload firmware to embedded board\n"
|
||||
"`platformio run --target clean` - clean project (remove compiled "
|
||||
"files)\n"
|
||||
"`platformio run --help` - additional information",
|
||||
"\nProject has been successfully %s including configuration files "
|
||||
"for `%s` IDE." %
|
||||
("initialized" if is_new_project else "updated", ide),
|
||||
fg="green")
|
||||
else:
|
||||
click.secho(
|
||||
"\nProject has been successfully %s! Useful commands:\n"
|
||||
"`pio run` - process/build project from the current directory\n"
|
||||
"`pio run --target upload` or `pio run -t upload` "
|
||||
"- upload firmware to a target\n"
|
||||
"`pio run --target clean` - clean project (remove compiled files)"
|
||||
"\n`pio run --help` - additional information" %
|
||||
("initialized" if is_new_project else "updated"),
|
||||
fg="green")
|
||||
|
||||
|
||||
def get_best_envname(project_dir, boards=None):
|
||||
config = util.load_project_config(project_dir)
|
||||
env_default = None
|
||||
if config.has_option("platformio", "env_default"):
|
||||
env_default = config.get("platformio",
|
||||
"env_default").split(", ")[0].strip()
|
||||
if env_default:
|
||||
return env_default
|
||||
section = None
|
||||
for section in config.sections():
|
||||
if not section.startswith("env:"):
|
||||
continue
|
||||
elif config.has_option(section, "board") and (not boards or config.get(
|
||||
section, "board") in boards):
|
||||
break
|
||||
return section[4:] if section else None
|
||||
|
||||
|
||||
def init_base_project(project_dir):
|
||||
if not util.is_platformio_project(project_dir):
|
||||
copyfile(
|
||||
join(util.get_source_dir(), "projectconftpl.ini"),
|
||||
join(project_dir, "platformio.ini"))
|
||||
ProjectConfig(join(project_dir, "platformio.ini")).save()
|
||||
with fs.cd(project_dir):
|
||||
dir_to_readme = [
|
||||
(get_project_src_dir(), None),
|
||||
(get_project_include_dir(), init_include_readme),
|
||||
(get_project_lib_dir(), init_lib_readme),
|
||||
(get_project_test_dir(), init_test_readme),
|
||||
]
|
||||
for (path, cb) in dir_to_readme:
|
||||
if isdir(path):
|
||||
continue
|
||||
makedirs(path)
|
||||
if cb:
|
||||
cb(path)
|
||||
|
||||
lib_dir = join(project_dir, "lib")
|
||||
src_dir = join(project_dir, "src")
|
||||
config = util.load_project_config(project_dir)
|
||||
if config.has_option("platformio", "src_dir"):
|
||||
src_dir = join(project_dir, config.get("platformio", "src_dir"))
|
||||
|
||||
for d in (src_dir, lib_dir):
|
||||
if not isdir(d):
|
||||
makedirs(d)
|
||||
def init_include_readme(include_dir):
|
||||
with open(join(include_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for project header files.
|
||||
|
||||
init_lib_readme(lib_dir)
|
||||
init_ci_conf(project_dir)
|
||||
init_cvs_ignore(project_dir)
|
||||
A header file is a file containing C declarations and macro definitions
|
||||
to be shared between several project source files. You request the use of a
|
||||
header file in your project source file (C, C++, etc) located in `src` folder
|
||||
by including it, with the C preprocessing directive `#include'.
|
||||
|
||||
```src/main.c
|
||||
|
||||
#include "header.h"
|
||||
|
||||
int main (void)
|
||||
{
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
Including a header file produces the same results as copying the header file
|
||||
into each source file that needs it. Such copying would be time-consuming
|
||||
and error-prone. With a header file, the related declarations appear
|
||||
in only one place. If they need to be changed, they can be changed in one
|
||||
place, and programs that include the header file will automatically use the
|
||||
new version when next recompiled. The header file eliminates the labor of
|
||||
finding and changing all the copies as well as the risk that a failure to
|
||||
find one copy will result in inconsistencies within a program.
|
||||
|
||||
In C, the usual convention is to give header files names that end with `.h'.
|
||||
It is most portable to use only letters, digits, dashes, and underscores in
|
||||
header file names, and at most one dot.
|
||||
|
||||
Read more about using header files in official GCC documentation:
|
||||
|
||||
* Include Syntax
|
||||
* Include Operation
|
||||
* Once-Only Headers
|
||||
* Computed Includes
|
||||
|
||||
https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
|
||||
""")
|
||||
|
||||
|
||||
def init_lib_readme(lib_dir):
|
||||
if isfile(join(lib_dir, "readme.txt")):
|
||||
return
|
||||
with open(join(lib_dir, "readme.txt"), "w") as f:
|
||||
with open(join(lib_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for the project specific (private) libraries.
|
||||
PlatformIO will compile them to static libraries and link to executable file.
|
||||
This directory is intended for project specific (private) libraries.
|
||||
PlatformIO will compile them to static libraries and link into executable file.
|
||||
|
||||
The source code of each library should be placed in separate directory, like
|
||||
"lib/private_lib/[here are source files]".
|
||||
The source code of each library should be placed in a an own separate directory
|
||||
("lib/your_library_name/[here are source files]").
|
||||
|
||||
For example, see how can be organized `Foo` and `Bar` libraries:
|
||||
For example, see a structure of the following two libraries `Foo` and `Bar`:
|
||||
|
||||
|--lib
|
||||
| |
|
||||
| |--Bar
|
||||
| | |--docs
|
||||
| | |--examples
|
||||
| | |--src
|
||||
| | |- Bar.c
|
||||
| | |- Bar.h
|
||||
| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|
||||
| |
|
||||
| |--Foo
|
||||
| | |- Foo.c
|
||||
| | |- Foo.h
|
||||
| |- readme.txt --> THIS FILE
|
||||
| |
|
||||
| |- README --> THIS FILE
|
||||
|
|
||||
|- platformio.ini
|
||||
|--src
|
||||
|- main.c
|
||||
|
||||
Then in `src/main.c` you should use:
|
||||
|
||||
and a contents of `src/main.c`:
|
||||
```
|
||||
#include <Foo.h>
|
||||
#include <Bar.h>
|
||||
|
||||
// rest H/C/CPP code
|
||||
int main (void)
|
||||
{
|
||||
...
|
||||
}
|
||||
|
||||
PlatformIO will find your libraries automatically, configure preprocessor's
|
||||
include paths and build them.
|
||||
```
|
||||
|
||||
PlatformIO Library Dependency Finder will find automatically dependent
|
||||
libraries scanning project source files.
|
||||
|
||||
More information about PlatformIO Library Dependency Finder
|
||||
- http://docs.platformio.org/page/librarymanager/ldf.html
|
||||
- https://docs.platformio.org/page/librarymanager/ldf.html
|
||||
""")
|
||||
|
||||
|
||||
def init_test_readme(test_dir):
|
||||
with open(join(test_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for PIO Unit Testing and project tests.
|
||||
|
||||
Unit Testing is a software testing method by which individual units of
|
||||
source code, sets of one or more MCU program modules together with associated
|
||||
control data, usage procedures, and operating procedures, are tested to
|
||||
determine whether they are fit for use. Unit testing finds problems early
|
||||
in the development cycle.
|
||||
|
||||
More information about PIO Unit Testing:
|
||||
- https://docs.platformio.org/page/plus/unit-testing.html
|
||||
""")
|
||||
|
||||
|
||||
def init_ci_conf(project_dir):
|
||||
if isfile(join(project_dir, ".travis.yml")):
|
||||
conf_path = join(project_dir, ".travis.yml")
|
||||
if isfile(conf_path):
|
||||
return
|
||||
with open(join(project_dir, ".travis.yml"), "w") as f:
|
||||
with open(conf_path, "w") as f:
|
||||
f.write("""# Continuous Integration (CI) is the practice, in software
|
||||
# engineering, of merging all developer working copies with a shared mainline
|
||||
# several times a day < http://docs.platformio.org/page/ci/index.html >
|
||||
# several times a day < https://docs.platformio.org/page/ci/index.html >
|
||||
#
|
||||
# Documentation:
|
||||
#
|
||||
@@ -211,13 +273,13 @@ def init_ci_conf(project_dir):
|
||||
# < https://docs.travis-ci.com/user/integration/platformio/ >
|
||||
#
|
||||
# * PlatformIO integration with Travis CI
|
||||
# < http://docs.platformio.org/page/ci/travis.html >
|
||||
# < https://docs.platformio.org/page/ci/travis.html >
|
||||
#
|
||||
# * User Guide for `platformio ci` command
|
||||
# < http://docs.platformio.org/page/userguide/cmd_ci.html >
|
||||
# < https://docs.platformio.org/page/userguide/cmd_ci.html >
|
||||
#
|
||||
#
|
||||
# Please choice one of the following templates (proposed below) and uncomment
|
||||
# Please choose one of the following templates (proposed below) and uncomment
|
||||
# it (remove "# " before each line) or use own configuration according to the
|
||||
# Travis CI documentation (see above).
|
||||
#
|
||||
@@ -245,7 +307,7 @@ def init_ci_conf(project_dir):
|
||||
|
||||
|
||||
#
|
||||
# Template #2: The project is intended to by used as a library with examples
|
||||
# Template #2: The project is intended to be used as a library with examples.
|
||||
#
|
||||
|
||||
# language: python
|
||||
@@ -272,32 +334,18 @@ def init_ci_conf(project_dir):
|
||||
|
||||
|
||||
def init_cvs_ignore(project_dir):
|
||||
ignore_path = join(project_dir, ".gitignore")
|
||||
default = [".pioenvs\n", ".piolibdeps\n"]
|
||||
current = []
|
||||
modified = False
|
||||
if isfile(ignore_path):
|
||||
with open(ignore_path) as fp:
|
||||
current = fp.readlines()
|
||||
if current and not current[-1].endswith("\n"):
|
||||
current[-1] += "\n"
|
||||
for d in default:
|
||||
if d not in current:
|
||||
modified = True
|
||||
current.append(d)
|
||||
if not modified:
|
||||
conf_path = join(project_dir, ".gitignore")
|
||||
if isfile(conf_path):
|
||||
return
|
||||
with open(ignore_path, "w") as fp:
|
||||
fp.writelines(current)
|
||||
with open(conf_path, "w") as fp:
|
||||
fp.write(".pio\n")
|
||||
|
||||
|
||||
def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
|
||||
force_download):
|
||||
content = []
|
||||
config = ProjectConfig(join(project_dir, "platformio.ini"),
|
||||
parse_extra=False)
|
||||
used_boards = []
|
||||
used_platforms = []
|
||||
|
||||
config = util.load_project_config(project_dir)
|
||||
for section in config.sections():
|
||||
cond = [
|
||||
section.startswith("env:"),
|
||||
@@ -307,12 +355,15 @@ def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
|
||||
used_boards.append(config.get(section, "board"))
|
||||
|
||||
pm = PlatformManager()
|
||||
used_platforms = []
|
||||
modified = False
|
||||
for id_ in board_ids:
|
||||
board_config = pm.board_config(id_)
|
||||
used_platforms.append(board_config['platform'])
|
||||
if id_ in used_boards:
|
||||
continue
|
||||
used_boards.append(id_)
|
||||
modified = True
|
||||
|
||||
envopts = {"platform": board_config['platform'], "board": id_}
|
||||
# find default framework for board
|
||||
@@ -326,20 +377,18 @@ def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
|
||||
_name, _value = item.split("=", 1)
|
||||
envopts[_name.strip()] = _value.strip()
|
||||
|
||||
content.append("")
|
||||
content.append("[env:%s%s]" % (env_prefix, id_))
|
||||
for name, value in envopts.items():
|
||||
content.append("%s = %s" % (name, value))
|
||||
section = "env:%s%s" % (env_prefix, id_)
|
||||
config.add_section(section)
|
||||
|
||||
for option, value in envopts.items():
|
||||
config.set(section, option, value)
|
||||
|
||||
if force_download and used_platforms:
|
||||
_install_dependent_platforms(ctx, used_platforms)
|
||||
|
||||
if not content:
|
||||
return
|
||||
|
||||
with open(join(project_dir, "platformio.ini"), "a") as f:
|
||||
content.append("")
|
||||
f.write("\n".join(content))
|
||||
if modified:
|
||||
config.save()
|
||||
config.reset_instances()
|
||||
|
||||
|
||||
def _install_dependent_platforms(ctx, platforms):
|
||||
@@ -348,6 +397,5 @@ def _install_dependent_platforms(ctx, platforms):
|
||||
]
|
||||
if set(platforms) <= set(installed_platforms):
|
||||
return
|
||||
ctx.invoke(
|
||||
cli_platform_install,
|
||||
platforms=list(set(platforms) - set(installed_platforms)))
|
||||
ctx.invoke(cli_platform_install,
|
||||
platforms=list(set(platforms) - set(installed_platforms)))
|
||||
|
||||
@@ -14,172 +14,277 @@
|
||||
|
||||
# pylint: disable=too-many-branches, too-many-locals
|
||||
|
||||
import json
|
||||
import time
|
||||
from os.path import isdir, join
|
||||
from urllib import quote
|
||||
|
||||
import click
|
||||
import semantic_version
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.managers.lib import LibraryManager, get_builtin_libs
|
||||
from platformio.util import get_api_result
|
||||
from platformio import exception, fs, util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.managers.lib import (LibraryManager, get_builtin_libs,
|
||||
is_builtin_lib)
|
||||
from platformio.proc import is_ci
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import (get_project_dir,
|
||||
get_project_global_lib_dir,
|
||||
get_project_libdeps_dir,
|
||||
is_platformio_project)
|
||||
|
||||
try:
|
||||
from urllib.parse import quote
|
||||
except ImportError:
|
||||
from urllib import quote
|
||||
|
||||
CTX_META_INPUT_DIRS_KEY = __name__ + ".input_dirs"
|
||||
CTX_META_PROJECT_ENVIRONMENTS_KEY = __name__ + ".project_environments"
|
||||
CTX_META_STORAGE_DIRS_KEY = __name__ + ".storage_dirs"
|
||||
CTX_META_STORAGE_LIBDEPS_KEY = __name__ + ".storage_lib_deps"
|
||||
|
||||
|
||||
@click.group(short_help="Library Manager")
|
||||
@click.option("-d",
|
||||
"--storage-dir",
|
||||
multiple=True,
|
||||
default=None,
|
||||
type=click.Path(exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True),
|
||||
help="Manage custom library storage")
|
||||
@click.option("-g",
|
||||
"--global",
|
||||
is_flag=True,
|
||||
help="Manage global PlatformIO library storage")
|
||||
@click.option(
|
||||
"-g",
|
||||
"--global",
|
||||
is_flag=True,
|
||||
help="Manage global PlatformIO library storage")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--storage-dir",
|
||||
default=None,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True),
|
||||
help="Manage custom library storage")
|
||||
"-e",
|
||||
"--environment",
|
||||
multiple=True,
|
||||
help=("Manage libraries for the specific project build environments "
|
||||
"declared in `platformio.ini`"))
|
||||
@click.pass_context
|
||||
def cli(ctx, **options):
|
||||
non_storage_cmds = ("search", "show", "register", "stats", "builtin")
|
||||
storage_cmds = ("install", "uninstall", "update", "list")
|
||||
# skip commands that don't need storage folder
|
||||
if ctx.invoked_subcommand in non_storage_cmds or \
|
||||
if ctx.invoked_subcommand not in storage_cmds or \
|
||||
(len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")):
|
||||
return
|
||||
storage_dir = options['storage_dir']
|
||||
if not storage_dir:
|
||||
if options['global']:
|
||||
storage_dir = join(util.get_home_dir(), "lib")
|
||||
elif util.is_platformio_project():
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
elif util.is_ci():
|
||||
storage_dir = join(util.get_home_dir(), "lib")
|
||||
storage_dirs = list(options['storage_dir'])
|
||||
if options['global']:
|
||||
storage_dirs.append(get_project_global_lib_dir())
|
||||
if not storage_dirs:
|
||||
if is_platformio_project():
|
||||
storage_dirs = [get_project_dir()]
|
||||
elif is_ci():
|
||||
storage_dirs = [get_project_global_lib_dir()]
|
||||
click.secho(
|
||||
"Warning! Global library storage is used automatically. "
|
||||
"Please use `platformio lib --global %s` command to remove "
|
||||
"this warning." % ctx.invoked_subcommand,
|
||||
fg="yellow")
|
||||
elif util.is_platformio_project(storage_dir):
|
||||
with util.cd(storage_dir):
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
|
||||
if not storage_dir and not util.is_platformio_project():
|
||||
raise exception.NotGlobalLibDir(util.get_project_dir(),
|
||||
join(util.get_home_dir(), "lib"),
|
||||
if not storage_dirs:
|
||||
raise exception.NotGlobalLibDir(get_project_dir(),
|
||||
get_project_global_lib_dir(),
|
||||
ctx.invoked_subcommand)
|
||||
|
||||
ctx.obj = LibraryManager(storage_dir)
|
||||
if "--json-output" not in ctx.args:
|
||||
click.echo("Library Storage: " + storage_dir)
|
||||
in_silence = PlatformioCLI.in_silence()
|
||||
ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY] = options['environment']
|
||||
ctx.meta[CTX_META_INPUT_DIRS_KEY] = storage_dirs
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = []
|
||||
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY] = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not is_platformio_project(storage_dir):
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
|
||||
continue
|
||||
with fs.cd(storage_dir):
|
||||
libdeps_dir = get_project_libdeps_dir()
|
||||
config = ProjectConfig.get_instance(join(storage_dir,
|
||||
"platformio.ini"))
|
||||
config.validate(options['environment'], silent=in_silence)
|
||||
for env in config.envs():
|
||||
if options['environment'] and env not in options['environment']:
|
||||
continue
|
||||
storage_dir = join(libdeps_dir, env)
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
|
||||
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY][storage_dir] = config.get(
|
||||
"env:" + env, "lib_deps", [])
|
||||
|
||||
|
||||
@cli.command("install", short_help="Install library")
|
||||
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
|
||||
# @click.option(
|
||||
# "--save",
|
||||
# is_flag=True,
|
||||
# help="Save installed libraries into the project's platformio.ini "
|
||||
# "library dependencies")
|
||||
@click.option(
|
||||
"-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option(
|
||||
"--interactive",
|
||||
"--save",
|
||||
is_flag=True,
|
||||
help="Allow to make a choice for all prompts")
|
||||
@click.option(
|
||||
"-f",
|
||||
"--force",
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload library if exists")
|
||||
@click.pass_obj
|
||||
def lib_install(lm, libraries, silent, interactive, force):
|
||||
# @TODO: "save" option
|
||||
for library in libraries:
|
||||
lm.install(
|
||||
library, silent=silent, interactive=interactive, force=force)
|
||||
help="Save installed libraries into the `platformio.ini` dependency list")
|
||||
@click.option("-s",
|
||||
"--silent",
|
||||
is_flag=True,
|
||||
help="Suppress progress reporting")
|
||||
@click.option("--interactive",
|
||||
is_flag=True,
|
||||
help="Allow to make a choice for all prompts")
|
||||
@click.option("-f",
|
||||
"--force",
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload library if exists")
|
||||
@click.pass_context
|
||||
def lib_install( # pylint: disable=too-many-arguments
|
||||
ctx, libraries, save, silent, interactive, force):
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
storage_libdeps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, [])
|
||||
|
||||
installed_manifests = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not silent and (libraries or storage_dir in storage_libdeps):
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryManager(storage_dir)
|
||||
if libraries:
|
||||
for library in libraries:
|
||||
pkg_dir = lm.install(library,
|
||||
silent=silent,
|
||||
interactive=interactive,
|
||||
force=force)
|
||||
installed_manifests[library] = lm.load_manifest(pkg_dir)
|
||||
elif storage_dir in storage_libdeps:
|
||||
builtin_lib_storages = None
|
||||
for library in storage_libdeps[storage_dir]:
|
||||
try:
|
||||
pkg_dir = lm.install(library,
|
||||
silent=silent,
|
||||
interactive=interactive,
|
||||
force=force)
|
||||
installed_manifests[library] = lm.load_manifest(pkg_dir)
|
||||
except exception.LibNotFound as e:
|
||||
if builtin_lib_storages is None:
|
||||
builtin_lib_storages = get_builtin_libs()
|
||||
if not silent or not is_builtin_lib(
|
||||
builtin_lib_storages, library):
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
|
||||
if not save or not libraries:
|
||||
return
|
||||
|
||||
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
|
||||
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
|
||||
for input_dir in input_dirs:
|
||||
config = ProjectConfig.get_instance(join(input_dir, "platformio.ini"))
|
||||
config.validate(project_environments)
|
||||
for env in config.envs():
|
||||
if project_environments and env not in project_environments:
|
||||
continue
|
||||
config.expand_interpolations = False
|
||||
lib_deps = config.get("env:" + env, "lib_deps", [])
|
||||
for library in libraries:
|
||||
if library in lib_deps:
|
||||
continue
|
||||
manifest = installed_manifests[library]
|
||||
try:
|
||||
assert library.lower() == manifest['name'].lower()
|
||||
assert semantic_version.Version(manifest['version'])
|
||||
lib_deps.append("{name}@^{version}".format(**manifest))
|
||||
except (AssertionError, ValueError):
|
||||
lib_deps.append(library)
|
||||
config.set("env:" + env, "lib_deps", lib_deps)
|
||||
config.save()
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Uninstall libraries")
|
||||
@click.argument("libraries", nargs=-1, metavar="[LIBRARY...]")
|
||||
@click.pass_obj
|
||||
def lib_uninstall(lm, libraries):
|
||||
for library in libraries:
|
||||
lm.uninstall(library)
|
||||
@click.pass_context
|
||||
def lib_uninstall(ctx, libraries):
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
for storage_dir in storage_dirs:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryManager(storage_dir)
|
||||
for library in libraries:
|
||||
lm.uninstall(library)
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed libraries")
|
||||
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for new version")
|
||||
@click.option("-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="DEPRECATED. Please use `--dry-run` instead")
|
||||
@click.option("--dry-run",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for the new versions")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_obj
|
||||
def lib_update(lm, libraries, only_check, json_output):
|
||||
if not libraries:
|
||||
libraries = [manifest['__pkg_dir'] for manifest in lm.get_installed()]
|
||||
@click.pass_context
|
||||
def lib_update(ctx, libraries, only_check, dry_run, json_output):
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
only_check = dry_run or only_check
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not json_output:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryManager(storage_dir)
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for library in libraries:
|
||||
pkg_dir = library if isdir(library) else None
|
||||
requirements = None
|
||||
url = None
|
||||
if not pkg_dir:
|
||||
name, requirements, url = lm.parse_pkg_uri(library)
|
||||
pkg_dir = lm.get_package_dir(name, requirements, url)
|
||||
if not pkg_dir:
|
||||
continue
|
||||
latest = lm.outdated(pkg_dir, requirements)
|
||||
if not latest:
|
||||
continue
|
||||
manifest = lm.load_manifest(pkg_dir)
|
||||
manifest['versionLatest'] = latest
|
||||
result.append(manifest)
|
||||
return click.echo(json.dumps(result))
|
||||
else:
|
||||
for library in libraries:
|
||||
lm.update(library, only_check=only_check)
|
||||
_libraries = libraries
|
||||
if not _libraries:
|
||||
_libraries = [
|
||||
manifest['__pkg_dir'] for manifest in lm.get_installed()
|
||||
]
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for library in _libraries:
|
||||
pkg_dir = library if isdir(library) else None
|
||||
requirements = None
|
||||
url = None
|
||||
if not pkg_dir:
|
||||
name, requirements, url = lm.parse_pkg_uri(library)
|
||||
pkg_dir = lm.get_package_dir(name, requirements, url)
|
||||
if not pkg_dir:
|
||||
continue
|
||||
latest = lm.outdated(pkg_dir, requirements)
|
||||
if not latest:
|
||||
continue
|
||||
manifest = lm.load_manifest(pkg_dir)
|
||||
manifest['versionLatest'] = latest
|
||||
result.append(manifest)
|
||||
json_result[storage_dir] = result
|
||||
else:
|
||||
for library in _libraries:
|
||||
lm.update(library, only_check=only_check)
|
||||
|
||||
if json_output:
|
||||
return click.echo(
|
||||
dump_json_to_unicode(json_result[storage_dirs[0]]
|
||||
if len(storage_dirs) == 1 else json_result))
|
||||
|
||||
return True
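Several commands in this changeset (lib update, platform update, remote update) apply the same flag migration: `-c/--only-check` stays as a deprecated alias and is folded into the new `--dry-run` flag. A stripped-down sketch of that wiring (the command name and echo text below are placeholders, not taken from the diff):

import click

@click.command("update")
@click.option("-c", "--only-check", is_flag=True,
              help="DEPRECATED. Please use `--dry-run` instead")
@click.option("--dry-run", is_flag=True,
              help="Do not update, only check for the new versions")
def update(only_check, dry_run):
    only_check = dry_run or only_check  # either flag selects check-only mode
    click.echo("Checking for new versions" if only_check else "Updating")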
|
||||
|
||||
|
||||
def print_lib_item(item):
|
||||
click.secho(item['name'], fg="cyan")
|
||||
click.echo("=" * len(item['name']))
|
||||
if "id" in item:
|
||||
click.secho("#ID: %d" % item['id'], bold=True)
|
||||
if "description" in item or "url" in item:
|
||||
click.echo(item.get("description", item.get("url", "")))
|
||||
click.echo()
|
||||
|
||||
for key in ("version", "homepage", "license", "keywords"):
|
||||
if key not in item or not item[key]:
|
||||
continue
|
||||
if isinstance(item[key], list):
|
||||
click.echo("%s: %s" % (key.title(), ", ".join(item[key])))
|
||||
@cli.command("list", short_help="List installed libraries")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_list(ctx, json_output):
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not json_output:
|
||||
print_storage_header(storage_dirs, storage_dir)
|
||||
lm = LibraryManager(storage_dir)
|
||||
items = lm.get_installed()
|
||||
if json_output:
|
||||
json_result[storage_dir] = items
|
||||
elif items:
|
||||
for item in sorted(items, key=lambda i: i['name']):
|
||||
print_lib_item(item)
|
||||
else:
|
||||
click.echo("%s: %s" % (key.title(), item[key]))
|
||||
click.echo("No items found")
|
||||
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in item:
|
||||
continue
|
||||
click.echo("Compatible %s: %s" % (key, ", ".join(
|
||||
[i['title'] if isinstance(i, dict) else i for i in item[key]])))
|
||||
if json_output:
|
||||
return click.echo(
|
||||
dump_json_to_unicode(json_result[storage_dirs[0]]
|
||||
if len(storage_dirs) == 1 else json_result))
|
||||
|
||||
if "authors" in item or "authornames" in item:
|
||||
click.echo("Authors: %s" % ", ".join(
|
||||
item.get("authornames",
|
||||
[a.get("name", "") for a in item.get("authors", [])])))
|
||||
|
||||
if "__src_url" in item:
|
||||
click.secho("Source: %s" % item['__src_url'])
|
||||
click.echo()
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for a library")
|
||||
@@ -193,27 +298,25 @@ def print_lib_item(item):
|
||||
@click.option("-f", "--framework", multiple=True)
|
||||
@click.option("-p", "--platform", multiple=True)
|
||||
@click.option("-i", "--header", multiple=True)
|
||||
@click.option(
|
||||
"--noninteractive",
|
||||
is_flag=True,
|
||||
help="Do not prompt, automatically paginate with delay")
|
||||
@click.option("--noninteractive",
|
||||
is_flag=True,
|
||||
help="Do not prompt, automatically paginate with delay")
|
||||
def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
if not query:
|
||||
query = []
|
||||
if not isinstance(query, list):
|
||||
query = list(query)
|
||||
|
||||
for key, values in filters.iteritems():
|
||||
for key, values in filters.items():
|
||||
for value in values:
|
||||
query.append('%s:"%s"' % (key, value))
|
||||
|
||||
result = get_api_result(
|
||||
"/v2/lib/search",
|
||||
dict(query=" ".join(query), page=page),
|
||||
cache_valid="1d")
|
||||
result = util.get_api_result("/v2/lib/search",
|
||||
dict(query=" ".join(query), page=page),
|
||||
cache_valid="1d")
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(result))
|
||||
click.echo(dump_json_to_unicode(result))
|
||||
return
|
||||
|
||||
if result['total'] == 0:
|
||||
@@ -228,13 +331,12 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
click.echo("For more examples and advanced search syntax, "
|
||||
"please use documentation:")
|
||||
click.secho(
|
||||
"http://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
|
||||
"https://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
|
||||
fg="cyan")
|
||||
return
|
||||
|
||||
click.secho(
|
||||
"Found %d libraries:\n" % result['total'],
|
||||
fg="green" if result['total'] else "yellow")
|
||||
click.secho("Found %d libraries:\n" % result['total'],
|
||||
fg="green" if result['total'] else "yellow")
|
||||
|
||||
while True:
|
||||
for item in result['items']:
|
||||
@@ -246,37 +348,18 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
|
||||
if noninteractive:
|
||||
click.echo()
|
||||
click.secho(
|
||||
"Loading next %d libraries... Press Ctrl+C to stop!" %
|
||||
result['perpage'],
|
||||
fg="yellow")
|
||||
click.secho("Loading next %d libraries... Press Ctrl+C to stop!" %
|
||||
result['perpage'],
|
||||
fg="yellow")
|
||||
click.echo()
|
||||
time.sleep(5)
|
||||
elif not click.confirm("Show next libraries?"):
|
||||
break
|
||||
result = get_api_result(
|
||||
"/v2/lib/search",
|
||||
{"query": " ".join(query),
|
||||
"page": int(result['page']) + 1},
|
||||
cache_valid="1d")
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed libraries")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_obj
|
||||
def lib_list(lm, json_output):
|
||||
items = lm.get_installed()
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
if not items:
|
||||
return None
|
||||
|
||||
for item in sorted(items, key=lambda i: i['name']):
|
||||
print_lib_item(item)
|
||||
|
||||
return True
|
||||
result = util.get_api_result("/v2/lib/search", {
|
||||
"query": " ".join(query),
|
||||
"page": int(result['page']) + 1
|
||||
},
|
||||
cache_valid="1d")
|
||||
|
||||
|
||||
@cli.command("builtin", short_help="List built-in libraries")
|
||||
@@ -285,7 +368,7 @@ def lib_list(lm, json_output):
|
||||
def lib_builtin(storage, json_output):
|
||||
items = get_builtin_libs(storage)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(items))
|
||||
return click.echo(dump_json_to_unicode(items))
|
||||
|
||||
for storage_ in items:
|
||||
if not storage_['items']:
|
||||
@@ -306,16 +389,15 @@ def lib_builtin(storage, json_output):
|
||||
def lib_show(library, json_output):
|
||||
lm = LibraryManager()
|
||||
name, requirements, _ = lm.parse_pkg_uri(library)
|
||||
lib_id = lm.search_lib_id(
|
||||
{
|
||||
"name": name,
|
||||
"requirements": requirements
|
||||
},
|
||||
silent=json_output,
|
||||
interactive=not json_output)
|
||||
lib = get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
|
||||
lib_id = lm.search_lib_id({
|
||||
"name": name,
|
||||
"requirements": requirements
|
||||
},
|
||||
silent=json_output,
|
||||
interactive=not json_output)
|
||||
lib = util.get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
|
||||
if json_output:
|
||||
return click.echo(json.dumps(lib))
|
||||
return click.echo(dump_json_to_unicode(lib))
|
||||
|
||||
click.secho(lib['name'], fg="cyan")
|
||||
click.echo("=" * len(lib['name']))
|
||||
@@ -389,84 +471,105 @@ def lib_register(config_url):
|
||||
and not config_url.startswith("https://")):
|
||||
raise exception.InvalidLibConfURL(config_url)
|
||||
|
||||
result = get_api_result("/lib/register", data=dict(config_url=config_url))
|
||||
result = util.get_api_result("/lib/register",
|
||||
data=dict(config_url=config_url))
|
||||
if "message" in result and result['message']:
|
||||
click.secho(
|
||||
result['message'],
|
||||
fg="green"
|
||||
if "successed" in result and result['successed'] else "red")
|
||||
click.secho(result['message'],
|
||||
fg="green" if "successed" in result and result['successed']
|
||||
else "red")
|
||||
|
||||
|
||||
@cli.command("stats", short_help="Library Registry Statistics")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_stats(json_output):
|
||||
result = get_api_result("/lib/stats", cache_valid="1h")
|
||||
result = util.get_api_result("/lib/stats", cache_valid="1h")
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
printitem_tpl = "{name:<33} {url}"
|
||||
printitemdate_tpl = "{name:<33} {date:23} {url}"
|
||||
|
||||
def _print_title(title):
|
||||
click.secho(title.upper(), bold=True)
|
||||
click.echo("*" * len(title))
|
||||
|
||||
def _print_header(with_date=False):
|
||||
click.echo((printitemdate_tpl if with_date else printitem_tpl).format(
|
||||
name=click.style("Name", fg="cyan"),
|
||||
date="Date",
|
||||
url=click.style("Url", fg="blue")))
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
def _print_lib_item(item):
|
||||
click.echo((printitemdate_tpl
|
||||
if "date" in item else printitem_tpl).format(
|
||||
name=click.style(item['name'], fg="cyan"),
|
||||
date=str(
|
||||
time.strftime("%c", util.parse_date(item['date']))
|
||||
if "date" in item else ""),
|
||||
url=click.style(
|
||||
"https://platformio.org/lib/show/%s/%s" %
|
||||
(item['id'], quote(item['name'])),
|
||||
fg="blue")))
|
||||
|
||||
def _print_tag_item(name):
|
||||
click.echo(
|
||||
printitem_tpl.format(
|
||||
name=click.style(name, fg="cyan"),
|
||||
url=click.style(
|
||||
"https://platformio.org/lib/search?query=" +
|
||||
quote("keyword:%s" % name),
|
||||
fg="blue")))
|
||||
return click.echo(dump_json_to_unicode(result))
|
||||
|
||||
for key in ("updated", "added"):
|
||||
_print_title("Recently " + key)
|
||||
_print_header(with_date=True)
|
||||
for item in result.get(key, []):
|
||||
_print_lib_item(item)
|
||||
tabular_data = [(click.style(item['name'], fg="cyan"),
|
||||
time.strftime("%c", util.parse_date(item['date'])),
|
||||
"https://platformio.org/lib/show/%s/%s" %
|
||||
(item['id'], quote(item['name'])))
|
||||
for item in result.get(key, [])]
|
||||
table = tabulate(tabular_data,
|
||||
headers=[
|
||||
click.style("RECENTLY " + key.upper(), bold=True),
|
||||
"Date", "URL"
|
||||
])
|
||||
click.echo(table)
|
||||
click.echo()
|
||||
|
||||
_print_title("Recent keywords")
|
||||
_print_header(with_date=False)
|
||||
for item in result.get("lastkeywords"):
|
||||
_print_tag_item(item)
|
||||
click.echo()
|
||||
|
||||
_print_title("Popular keywords")
|
||||
_print_header(with_date=False)
|
||||
for item in result.get("topkeywords"):
|
||||
_print_tag_item(item)
|
||||
click.echo()
|
||||
for key in ("lastkeywords", "topkeywords"):
|
||||
tabular_data = [(click.style(name, fg="cyan"),
|
||||
"https://platformio.org/lib/search?query=" +
|
||||
quote("keyword:%s" % name))
|
||||
for name in result.get(key, [])]
|
||||
table = tabulate(
|
||||
tabular_data,
|
||||
headers=[
|
||||
click.style(
|
||||
("RECENT" if key == "lastkeywords" else "POPULAR") +
|
||||
" KEYWORDS",
|
||||
bold=True), "URL"
|
||||
])
|
||||
click.echo(table)
|
||||
click.echo()
|
||||
|
||||
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth",
|
||||
"Month")):
|
||||
_print_title("Featured: " + title)
|
||||
_print_header(with_date=False)
|
||||
for item in result.get(key, []):
|
||||
_print_lib_item(item)
|
||||
tabular_data = [(click.style(item['name'], fg="cyan"),
|
||||
"https://platformio.org/lib/show/%s/%s" %
|
||||
(item['id'], quote(item['name'])))
|
||||
for item in result.get(key, [])]
|
||||
table = tabulate(tabular_data,
|
||||
headers=[
|
||||
click.style("FEATURED: " + title.upper(),
|
||||
bold=True), "URL"
|
||||
])
|
||||
click.echo(table)
|
||||
click.echo()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def print_storage_header(storage_dirs, storage_dir):
    if storage_dirs and storage_dirs[0] != storage_dir:
        click.echo("")
    click.echo(
        click.style("Library Storage: ", bold=True) +
        click.style(storage_dir, fg="blue"))


def print_lib_item(item):
    click.secho(item['name'], fg="cyan")
    click.echo("=" * len(item['name']))
    if "id" in item:
        click.secho("#ID: %d" % item['id'], bold=True)
    if "description" in item or "url" in item:
        click.echo(item.get("description", item.get("url", "")))
    click.echo()

    for key in ("version", "homepage", "license", "keywords"):
        if key not in item or not item[key]:
            continue
        if isinstance(item[key], list):
            click.echo("%s: %s" % (key.title(), ", ".join(item[key])))
        else:
            click.echo("%s: %s" % (key.title(), item[key]))

    for key in ("frameworks", "platforms"):
        if key not in item:
            continue
        click.echo("Compatible %s: %s" % (key, ", ".join(
            [i['title'] if isinstance(i, dict) else i for i in item[key]])))

    if "authors" in item or "authornames" in item:
        click.echo("Authors: %s" % ", ".join(
            item.get("authornames",
                     [a.get("name", "") for a in item.get("authors", [])])))

    if "__src_url" in item:
        click.secho("Source: %s" % item['__src_url'])
    click.echo()

@@ -12,13 +12,13 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
from os.path import dirname, isdir
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, exception, util
|
||||
from platformio.commands.boards import print_boards
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
|
||||
|
||||
@@ -29,9 +29,9 @@ def cli():
|
||||
|
||||
def _print_platforms(platforms):
|
||||
for platform in platforms:
|
||||
click.echo("{name} ~ {title}".format(
|
||||
name=click.style(platform['name'], fg="cyan"),
|
||||
title=platform['title']))
|
||||
click.echo("{name} ~ {title}".format(name=click.style(platform['name'],
|
||||
fg="cyan"),
|
||||
title=platform['title']))
|
||||
click.echo("=" * (3 + len(platform['name'] + platform['title'])))
|
||||
click.echo(platform['description'])
|
||||
click.echo()
|
||||
@@ -42,7 +42,11 @@ def _print_platforms(platforms):
|
||||
if "packages" in platform:
|
||||
click.echo("Packages: %s" % ", ".join(platform['packages']))
|
||||
if "version" in platform:
|
||||
click.echo("Version: " + platform['version'])
|
||||
if "__src_url" in platform:
|
||||
click.echo("Version: #%s (%s)" %
|
||||
(platform['version'], platform['__src_url']))
|
||||
else:
|
||||
click.echo("Version: " + platform['version'])
|
||||
click.echo()
|
||||
|
||||
|
||||
@@ -54,18 +58,6 @@ def _get_registry_platforms():
|
||||
return platforms
|
||||
|
||||
|
||||
def _original_version(version):
|
||||
if version.count(".") != 2:
|
||||
return None
|
||||
_, y = version.split(".")[:2]
|
||||
if int(y) < 100:
|
||||
return None
|
||||
if len(y) % 2 != 0:
|
||||
y = "0" + y
|
||||
parts = [str(int(y[i * 2:i * 2 + 2])) for i in range(len(y) / 2)]
|
||||
return ".".join(parts)
|
||||
|
||||
|
||||
def _get_platform_data(*args, **kwargs):
|
||||
try:
|
||||
return _get_installed_platform_data(*args, **kwargs)
|
||||
@@ -77,18 +69,18 @@ def _get_installed_platform_data(platform,
|
||||
with_boards=True,
|
||||
expose_packages=True):
|
||||
p = PlatformFactory.newPlatform(platform)
|
||||
data = dict(
|
||||
name=p.name,
|
||||
title=p.title,
|
||||
description=p.description,
|
||||
version=p.version,
|
||||
homepage=p.homepage,
|
||||
repository=p.repository_url,
|
||||
url=p.vendor_url,
|
||||
license=p.license,
|
||||
forDesktop=not p.is_embedded(),
|
||||
frameworks=sorted(p.frameworks.keys() if p.frameworks else []),
|
||||
packages=p.packages.keys() if p.packages else [])
|
||||
data = dict(name=p.name,
|
||||
title=p.title,
|
||||
description=p.description,
|
||||
version=p.version,
|
||||
homepage=p.homepage,
|
||||
repository=p.repository_url,
|
||||
url=p.vendor_url,
|
||||
docs=p.docs_url,
|
||||
license=p.license,
|
||||
forDesktop=not p.is_embedded(),
|
||||
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
|
||||
packages=list(p.packages) if p.packages else [])
|
||||
|
||||
# if dump to API
|
||||
# del data['version']
|
||||
@@ -110,18 +102,17 @@ def _get_installed_platform_data(platform,
|
||||
data['packages'] = []
|
||||
installed_pkgs = p.get_installed_packages()
|
||||
for name, opts in p.packages.items():
|
||||
item = dict(
|
||||
name=name,
|
||||
type=p.get_package_type(name),
|
||||
requirements=opts.get("version"),
|
||||
optional=opts.get("optional") is True)
|
||||
item = dict(name=name,
|
||||
type=p.get_package_type(name),
|
||||
requirements=opts.get("version"),
|
||||
optional=opts.get("optional") is True)
|
||||
if name in installed_pkgs:
|
||||
for key, value in installed_pkgs[name].items():
|
||||
if key not in ("url", "version", "description"):
|
||||
continue
|
||||
item[key] = value
|
||||
if key == "version":
|
||||
item["originalVersion"] = _original_version(value)
|
||||
item["originalVersion"] = util.get_original_version(value)
|
||||
data['packages'].append(item)
|
||||
|
||||
return data
|
||||
@@ -140,18 +131,17 @@ def _get_registry_platform_data( # pylint: disable=unused-argument
|
||||
if not _data:
|
||||
return None
|
||||
|
||||
data = dict(
|
||||
name=_data['name'],
|
||||
title=_data['title'],
|
||||
description=_data['description'],
|
||||
homepage=_data['homepage'],
|
||||
repository=_data['repository'],
|
||||
url=_data['url'],
|
||||
license=_data['license'],
|
||||
forDesktop=_data['forDesktop'],
|
||||
frameworks=_data['frameworks'],
|
||||
packages=_data['packages'],
|
||||
versions=_data['versions'])
|
||||
data = dict(name=_data['name'],
|
||||
title=_data['title'],
|
||||
description=_data['description'],
|
||||
homepage=_data['homepage'],
|
||||
repository=_data['repository'],
|
||||
url=_data['url'],
|
||||
license=_data['license'],
|
||||
forDesktop=_data['forDesktop'],
|
||||
frameworks=_data['frameworks'],
|
||||
packages=_data['packages'],
|
||||
versions=_data['versions'])
|
||||
|
||||
if with_boards:
|
||||
data['boards'] = [
|
||||
@@ -170,15 +160,16 @@ def platform_search(query, json_output):
|
||||
for platform in _get_registry_platforms():
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = json.dumps(platform)
|
||||
search_data = dump_json_to_unicode(platform)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
platforms.append(
|
||||
_get_registry_platform_data(
|
||||
platform['name'], with_boards=False, expose_packages=False))
|
||||
_get_registry_platform_data(platform['name'],
|
||||
with_boards=False,
|
||||
expose_packages=False))
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
click.echo(dump_json_to_unicode(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
@@ -191,11 +182,11 @@ def platform_frameworks(query, json_output):
|
||||
for framework in util.get_api_result("/frameworks", cache_valid="7d"):
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = json.dumps(framework)
|
||||
search_data = dump_json_to_unicode(framework)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
framework['homepage'] = (
|
||||
"https://platformio.org/frameworks/" + framework['name'])
|
||||
framework['homepage'] = ("https://platformio.org/frameworks/" +
|
||||
framework['name'])
|
||||
framework['platforms'] = [
|
||||
platform['name'] for platform in _get_registry_platforms()
|
||||
if framework['name'] in platform['frameworks']
|
||||
@@ -204,7 +195,7 @@ def platform_frameworks(query, json_output):
|
||||
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest['name'])
|
||||
if json_output:
|
||||
click.echo(json.dumps(frameworks))
|
||||
click.echo(dump_json_to_unicode(frameworks))
|
||||
else:
|
||||
_print_platforms(frameworks)
|
||||
|
||||
@@ -216,14 +207,13 @@ def platform_list(json_output):
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
platforms.append(
|
||||
_get_installed_platform_data(
|
||||
manifest['__pkg_dir'],
|
||||
with_boards=False,
|
||||
expose_packages=False))
|
||||
_get_installed_platform_data(manifest['__pkg_dir'],
|
||||
with_boards=False,
|
||||
expose_packages=False))
|
||||
|
||||
platforms = sorted(platforms, key=lambda manifest: manifest['name'])
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
click.echo(dump_json_to_unicode(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
@@ -236,10 +226,11 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||
if not data:
|
||||
raise exception.UnknownPlatform(platform)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(data))
|
||||
return click.echo(dump_json_to_unicode(data))
|
||||
|
||||
click.echo("{name} ~ {title}".format(
|
||||
name=click.style(data['name'], fg="cyan"), title=data['title']))
|
||||
click.echo("{name} ~ {title}".format(name=click.style(data['name'],
|
||||
fg="cyan"),
|
||||
title=data['title']))
|
||||
click.echo("=" * (3 + len(data['name'] + data['title'])))
|
||||
click.echo(data['description'])
|
||||
click.echo()
|
||||
@@ -304,17 +295,15 @@ def platform_install(platforms, with_package, without_package,
|
||||
skip_default_package, force):
|
||||
pm = PlatformManager()
|
||||
for platform in platforms:
|
||||
if pm.install(
|
||||
name=platform,
|
||||
with_packages=with_package,
|
||||
without_packages=without_package,
|
||||
skip_default_package=skip_default_package,
|
||||
force=force):
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully installed!\n"
|
||||
"The rest of packages will be installed automatically "
|
||||
"depending on your build environment." % platform,
|
||||
fg="green")
|
||||
if pm.install(name=platform,
|
||||
with_packages=with_package,
|
||||
without_packages=without_package,
|
||||
skip_default_package=skip_default_package,
|
||||
force=force):
|
||||
click.secho("The platform '%s' has been successfully installed!\n"
|
||||
"The rest of packages will be installed automatically "
|
||||
"depending on your build environment." % platform,
|
||||
fg="green")
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Uninstall development platform")
|
||||
@@ -323,26 +312,27 @@ def platform_uninstall(platforms):
|
||||
pm = PlatformManager()
|
||||
for platform in platforms:
|
||||
if pm.uninstall(platform):
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully "
|
||||
"uninstalled!" % platform,
|
||||
fg="green")
|
||||
click.secho("The platform '%s' has been successfully "
|
||||
"uninstalled!" % platform,
|
||||
fg="green")
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed development platforms")
|
||||
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
|
||||
@click.option(
|
||||
"-p",
|
||||
"--only-packages",
|
||||
is_flag=True,
|
||||
help="Update only the platform packages")
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for a new version")
|
||||
@click.option("-p",
|
||||
"--only-packages",
|
||||
is_flag=True,
|
||||
help="Update only the platform packages")
|
||||
@click.option("-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="DEPRECATED. Please use `--dry-run` instead")
|
||||
@click.option("--dry-run",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for the new versions")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_update(platforms, only_packages, only_check, json_output):
|
||||
def platform_update( # pylint: disable=too-many-locals
|
||||
platforms, only_packages, only_check, dry_run, json_output):
|
||||
pm = PlatformManager()
|
||||
pkg_dir_to_name = {}
|
||||
if not platforms:
|
||||
@@ -352,6 +342,8 @@ def platform_update(platforms, only_packages, only_check, json_output):
|
||||
pkg_dir_to_name[manifest['__pkg_dir']] = manifest.get(
|
||||
"title", manifest['name'])
|
||||
|
||||
only_check = dry_run or only_check
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for platform in platforms:
|
||||
@@ -364,24 +356,25 @@ def platform_update(platforms, only_packages, only_check, json_output):
|
||||
if not pkg_dir:
|
||||
continue
|
||||
latest = pm.outdated(pkg_dir, requirements)
|
||||
if (not latest and not PlatformFactory.newPlatform(pkg_dir)
|
||||
.are_outdated_packages()):
|
||||
if (not latest and not PlatformFactory.newPlatform(
|
||||
pkg_dir).are_outdated_packages()):
|
||||
continue
|
||||
data = _get_installed_platform_data(
|
||||
pkg_dir, with_boards=False, expose_packages=False)
|
||||
data = _get_installed_platform_data(pkg_dir,
|
||||
with_boards=False,
|
||||
expose_packages=False)
|
||||
if latest:
|
||||
data['versionLatest'] = latest
|
||||
result.append(data)
|
||||
return click.echo(json.dumps(result))
|
||||
else:
|
||||
# cleanup cached board and platform lists
|
||||
app.clean_cache()
|
||||
for platform in platforms:
|
||||
click.echo("Platform %s" % click.style(
|
||||
pkg_dir_to_name.get(platform, platform), fg="cyan"))
|
||||
click.echo("--------")
|
||||
pm.update(
|
||||
platform, only_packages=only_packages, only_check=only_check)
|
||||
click.echo()
|
||||
return click.echo(dump_json_to_unicode(result))
|
||||
|
||||
# cleanup cached board and platform lists
|
||||
app.clean_cache()
|
||||
for platform in platforms:
|
||||
click.echo(
|
||||
"Platform %s" %
|
||||
click.style(pkg_dir_to_name.get(platform, platform), fg="cyan"))
|
||||
click.echo("--------")
|
||||
pm.update(platform, only_packages=only_packages, only_check=only_check)
|
||||
click.echo()
|
||||
|
||||
return True
|
||||
|
||||
@@ -21,8 +21,9 @@ from time import sleep
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio import exception, fs
|
||||
from platformio.commands.device import device_monitor as cmd_device_monitor
|
||||
from platformio.compat import get_file_contents
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@@ -42,12 +43,13 @@ def remote_agent():
|
||||
@remote_agent.command("start", short_help="Start agent")
|
||||
@click.option("-n", "--name")
|
||||
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--working-dir",
|
||||
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
|
||||
type=click.Path(
|
||||
file_okay=False, dir_okay=True, writable=True, resolve_path=True))
|
||||
@click.option("-d",
|
||||
"--working-dir",
|
||||
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
|
||||
type=click.Path(file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
def remote_agent_start(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
@@ -62,14 +64,16 @@ def remote_agent_list():
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command(
|
||||
"update", short_help="Update installed Platforms, Packages and Libraries")
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for new version")
|
||||
def remote_update(only_check):
|
||||
@cli.command("update",
|
||||
short_help="Update installed Platforms, Packages and Libraries")
|
||||
@click.option("-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="DEPRECATED. Please use `--dry-run` instead")
|
||||
@click.option("--dry-run",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for the new versions")
|
||||
def remote_update(only_check, dry_run):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@@ -77,16 +81,14 @@ def remote_update(only_check):
|
||||
@click.option("-e", "--environment", multiple=True)
|
||||
@click.option("-t", "--target", multiple=True)
|
||||
@click.option("--upload-port")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--disable-auto-clean", is_flag=True)
|
||||
@click.option("-r", "--force-remote", is_flag=True)
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@@ -100,16 +102,14 @@ def remote_run(**kwargs):
|
||||
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
|
||||
@click.option("--upload-port")
|
||||
@click.option("--test-port")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-r", "--force-remote", is_flag=True)
|
||||
@click.option("--without-building", is_flag=True)
|
||||
@click.option("--without-uploading", is_flag=True)
|
||||
@@ -131,59 +131,55 @@ def device_list(json_output):
|
||||
|
||||
@remote_device.command("monitor", short_help="Monitor remote device")
|
||||
@click.option("--port", "-p", help="Port, a number or a device name")
|
||||
@click.option(
|
||||
"--baud", "-b", type=int, default=9600, help="Set baud rate, default=9600")
|
||||
@click.option(
|
||||
"--parity",
|
||||
default="N",
|
||||
type=click.Choice(["N", "E", "O", "S", "M"]),
|
||||
help="Set parity, default=N")
|
||||
@click.option(
|
||||
"--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
|
||||
@click.option(
|
||||
"--xonxoff",
|
||||
is_flag=True,
|
||||
help="Enable software flow control, default=Off")
|
||||
@click.option(
|
||||
"--rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state")
|
||||
@click.option(
|
||||
"--dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state")
|
||||
@click.option("--baud",
|
||||
"-b",
|
||||
type=int,
|
||||
default=9600,
|
||||
help="Set baud rate, default=9600")
|
||||
@click.option("--parity",
|
||||
default="N",
|
||||
type=click.Choice(["N", "E", "O", "S", "M"]),
|
||||
help="Set parity, default=N")
|
||||
@click.option("--rtscts",
|
||||
is_flag=True,
|
||||
help="Enable RTS/CTS flow control, default=Off")
|
||||
@click.option("--xonxoff",
|
||||
is_flag=True,
|
||||
help="Enable software flow control, default=Off")
|
||||
@click.option("--rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state")
|
||||
@click.option("--dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state")
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
|
||||
@click.option(
|
||||
"--encoding",
|
||||
default="UTF-8",
|
||||
help="Set the encoding for the serial port (e.g. hexlify, "
|
||||
"Latin1, UTF-8), default: UTF-8")
|
||||
@click.option("--encoding",
|
||||
default="UTF-8",
|
||||
help="Set the encoding for the serial port (e.g. hexlify, "
|
||||
"Latin1, UTF-8), default: UTF-8")
|
||||
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
|
||||
@click.option(
|
||||
"--eol",
|
||||
default="CRLF",
|
||||
type=click.Choice(["CR", "LF", "CRLF"]),
|
||||
help="End of line mode, default=CRLF")
|
||||
@click.option(
|
||||
"--raw", is_flag=True, help="Do not apply any encodings/transformations")
|
||||
@click.option(
|
||||
"--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application, default=3 (Ctrl+C)")
|
||||
@click.option(
|
||||
"--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control miniterm (menu), default=20 (DEC)")
|
||||
@click.option(
|
||||
"--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off")
|
||||
@click.option("--eol",
|
||||
default="CRLF",
|
||||
type=click.Choice(["CR", "LF", "CRLF"]),
|
||||
help="End of line mode, default=CRLF")
|
||||
@click.option("--raw",
|
||||
is_flag=True,
|
||||
help="Do not apply any encodings/transformations")
|
||||
@click.option("--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application, default=3 (Ctrl+C)")
|
||||
@click.option("--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control miniterm (menu), default=20 (DEC)")
|
||||
@click.option("--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off")
|
||||
@click.pass_context
|
||||
def device_monitor(ctx, **kwargs):
|
||||
|
||||
@@ -202,8 +198,8 @@ def device_monitor(ctx, **kwargs):
|
||||
sleep(0.1)
|
||||
if not t.is_alive():
|
||||
return
|
||||
kwargs['port'] = open(sock_file).read()
|
||||
kwargs['port'] = get_file_contents(sock_file)
|
||||
ctx.invoke(cmd_device_monitor, **kwargs)
|
||||
t.join(2)
|
||||
finally:
|
||||
util.rmtree_(sock_dir)
|
||||
fs.rmtree(sock_dir)
|
||||
|
||||
@@ -1,423 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from datetime import datetime
|
||||
from hashlib import sha1
|
||||
from os import getcwd, makedirs, walk
|
||||
from os.path import getmtime, isdir, isfile, join
|
||||
from time import time
|
||||
|
||||
import click
|
||||
|
||||
from platformio import __version__, exception, telemetry, util
|
||||
from platformio.commands.device import device_monitor as cmd_device_monitor
|
||||
from platformio.commands.lib import lib_install as cmd_lib_install
|
||||
from platformio.commands.platform import \
|
||||
platform_install as cmd_platform_install
|
||||
from platformio.managers.lib import LibraryManager, is_builtin_lib
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
|
||||
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
|
||||
|
||||
|
||||
@click.command("run", short_help="Process project environments")
|
||||
@click.option("-e", "--environment", multiple=True)
|
||||
@click.option("-t", "--target", multiple=True)
|
||||
@click.option("--upload-port")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.option("--disable-auto-clean", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
|
||||
disable_auto_clean):
|
||||
# find project directory on upper level
|
||||
if isfile(project_dir):
|
||||
project_dir = util.find_project_dir_above(project_dir)
|
||||
|
||||
if not util.is_platformio_project(project_dir):
|
||||
raise exception.NotPlatformIOProject(project_dir)
|
||||
|
||||
with util.cd(project_dir):
|
||||
# clean obsolete build dir
|
||||
if not disable_auto_clean:
|
||||
try:
|
||||
_clean_build_dir(util.get_projectbuild_dir())
|
||||
except: # pylint: disable=bare-except
|
||||
click.secho(
|
||||
"Can not remove temporary directory `%s`. Please remove "
|
||||
"it manually to avoid build issues" %
|
||||
util.get_projectbuild_dir(force=True),
|
||||
fg="yellow")
|
||||
|
||||
config = util.load_project_config()
|
||||
env_default = None
|
||||
if config.has_option("platformio", "env_default"):
|
||||
env_default = util.parse_conf_multi_values(
|
||||
config.get("platformio", "env_default"))
|
||||
|
||||
check_project_defopts(config)
|
||||
check_project_envs(config, environment or env_default)
|
||||
|
||||
results = []
|
||||
start_time = time()
|
||||
for section in config.sections():
|
||||
if not section.startswith("env:"):
|
||||
continue
|
||||
|
||||
envname = section[4:]
|
||||
skipenv = any([
|
||||
environment and envname not in environment, not environment
|
||||
and env_default and envname not in env_default
|
||||
])
|
||||
if skipenv:
|
||||
results.append((envname, None))
|
||||
continue
|
||||
|
||||
if not silent and results:
|
||||
click.echo()
|
||||
|
||||
options = {}
|
||||
for k, v in config.items(section):
|
||||
options[k] = v
|
||||
if "piotest" not in options and "piotest" in ctx.meta:
|
||||
options['piotest'] = ctx.meta['piotest']
|
||||
|
||||
ep = EnvironmentProcessor(ctx, envname, options, target,
|
||||
upload_port, silent, verbose)
|
||||
result = (envname, ep.process())
|
||||
results.append(result)
|
||||
if result[1] and "monitor" in ep.get_build_targets() and \
|
||||
"nobuild" not in ep.get_build_targets():
|
||||
ctx.invoke(cmd_device_monitor)
|
||||
|
||||
found_error = any(status is False for (_, status) in results)
|
||||
|
||||
if (found_error or not silent) and len(results) > 1:
|
||||
click.echo()
|
||||
print_summary(results, start_time)
|
||||
|
||||
if found_error:
|
||||
raise exception.ReturnErrorCode(1)
|
||||
return True
|
||||
|
||||
|
||||
class EnvironmentProcessor(object):
|
||||
|
||||
DEFAULT_DUMP_OPTIONS = ("platform", "framework", "board")
|
||||
|
||||
KNOWN_PLATFORMIO_OPTIONS = ("env_default", "home_dir", "lib_dir",
|
||||
"libdeps_dir", "include_dir", "src_dir",
|
||||
"build_dir", "data_dir", "test_dir",
|
||||
"boards_dir", "lib_extra_dirs")
|
||||
|
||||
KNOWN_ENV_OPTIONS = ("platform", "framework", "board", "board_mcu",
|
||||
"board_f_cpu", "board_f_flash", "board_flash_mode",
|
||||
"build_flags", "src_build_flags", "build_unflags",
|
||||
"src_filter", "extra_scripts", "targets",
|
||||
"upload_port", "upload_protocol", "upload_speed",
|
||||
"upload_flags", "upload_resetmethod", "lib_deps",
|
||||
"lib_ignore", "lib_extra_dirs", "lib_ldf_mode",
|
||||
"lib_compat_mode", "lib_archive", "piotest",
|
||||
"test_transport", "test_filter", "test_ignore",
|
||||
"test_port", "test_speed", "debug_tool", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds", "debug_server",
|
||||
"debug_init_break", "debug_load_cmd",
|
||||
"debug_load_mode", "monitor_port", "monitor_baud",
|
||||
"monitor_rts", "monitor_dtr")
|
||||
|
||||
IGNORE_BUILD_OPTIONS = ("test_transport", "test_filter", "test_ignore",
|
||||
"test_port", "test_speed", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds",
|
||||
"debug_server", "debug_init_break",
|
||||
"debug_load_cmd", "debug_load_mode",
|
||||
"monitor_port", "monitor_baud", "monitor_rts",
|
||||
"monitor_dtr")
|
||||
|
||||
REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"}
|
||||
|
||||
RENAMED_OPTIONS = {
|
||||
"lib_use": "lib_deps",
|
||||
"lib_force": "lib_deps",
|
||||
"extra_script": "extra_scripts"
|
||||
}
|
||||
|
||||
RENAMED_PLATFORMS = {"espressif": "espressif8266"}
|
||||
|
||||
def __init__(
|
||||
self, # pylint: disable=R0913
|
||||
cmd_ctx,
|
||||
name,
|
||||
options,
|
||||
targets,
|
||||
upload_port,
|
||||
silent,
|
||||
verbose):
|
||||
self.cmd_ctx = cmd_ctx
|
||||
self.name = name
|
||||
self.options = options
|
||||
self.targets = targets
|
||||
self.upload_port = upload_port
|
||||
self.silent = silent
|
||||
self.verbose = verbose
|
||||
|
||||
def process(self):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
start_time = time()
|
||||
env_dump = []
|
||||
|
||||
for k, v in self.options.items():
|
||||
self.options[k] = self.options[k].strip()
|
||||
if self.verbose or k in self.DEFAULT_DUMP_OPTIONS:
|
||||
env_dump.append(
|
||||
"%s: %s" % (k, ", ".join(util.parse_conf_multi_values(v))))
|
||||
|
||||
if not self.silent:
|
||||
click.echo("[%s] Processing %s (%s)" %
|
||||
(datetime.now().strftime("%c"),
|
||||
click.style(self.name, fg="cyan", bold=True),
|
||||
"; ".join(env_dump)))
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
self.options = self._validate_options(self.options)
|
||||
result = self._run()
|
||||
is_error = result['returncode'] != 0
|
||||
|
||||
if self.silent and not is_error:
|
||||
return True
|
||||
|
||||
if is_error or "piotest_processor" not in self.cmd_ctx.meta:
|
||||
print_header(
|
||||
"[%s] Took %.2f seconds" %
|
||||
((click.style("ERROR", fg="red", bold=True)
|
||||
if is_error else click.style(
|
||||
"SUCCESS", fg="green", bold=True)), time() - start_time),
|
||||
is_error=is_error)
|
||||
|
||||
return not is_error
|
||||
|
||||
def _validate_options(self, options):
|
||||
result = {}
|
||||
for k, v in options.items():
|
||||
# process obsolete options
|
||||
if k in self.RENAMED_OPTIONS:
|
||||
click.secho(
|
||||
"Warning! `%s` option is deprecated and will be "
|
||||
"removed in the next release! Please use "
|
||||
"`%s` instead." % (k, self.RENAMED_OPTIONS[k]),
|
||||
fg="yellow")
|
||||
k = self.RENAMED_OPTIONS[k]
|
||||
# process renamed platforms
|
||||
if k == "platform" and v in self.RENAMED_PLATFORMS:
|
||||
click.secho(
|
||||
"Warning! Platform `%s` is deprecated and will be "
|
||||
"removed in the next release! Please use "
|
||||
"`%s` instead." % (v, self.RENAMED_PLATFORMS[v]),
|
||||
fg="yellow")
|
||||
v = self.RENAMED_PLATFORMS[v]
|
||||
|
||||
# warn about unknown options
|
||||
if k not in self.KNOWN_ENV_OPTIONS and not k.startswith("custom_"):
|
||||
click.secho(
|
||||
"Detected non-PlatformIO `%s` option in `[env:%s]` section"
|
||||
% (k, self.name),
|
||||
fg="yellow")
|
||||
result[k] = v
|
||||
return result
|
||||
|
||||
def get_build_variables(self):
|
||||
variables = {"pioenv": self.name}
|
||||
if self.upload_port:
|
||||
variables['upload_port'] = self.upload_port
|
||||
for k, v in self.options.items():
|
||||
if k in self.REMAPED_OPTIONS:
|
||||
k = self.REMAPED_OPTIONS[k]
|
||||
if k in self.IGNORE_BUILD_OPTIONS:
|
||||
continue
|
||||
if k == "targets" or (k == "upload_port" and self.upload_port):
|
||||
continue
|
||||
variables[k] = v
|
||||
return variables
|
||||
|
||||
def get_build_targets(self):
|
||||
targets = []
|
||||
if self.targets:
|
||||
targets = [t for t in self.targets]
|
||||
elif "targets" in self.options:
|
||||
targets = self.options['targets'].split(", ")
|
||||
return targets
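A small clarification of the split above, since the separator is easy to misread:

# `targets = upload, monitor` in platformio.ini yields ["upload", "monitor"], but the
# split is on the literal ", " string, so "upload,monitor" (no space) stays one item.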
|
||||
|
||||
def _run(self):
|
||||
if "platform" not in self.options:
|
||||
raise exception.UndefinedEnvPlatform(self.name)
|
||||
|
||||
build_vars = self.get_build_variables()
|
||||
build_targets = self.get_build_targets()
|
||||
|
||||
telemetry.on_run_environment(self.options, build_targets)
|
||||
|
||||
# skip monitor target, we call it above
|
||||
if "monitor" in build_targets:
|
||||
build_targets.remove("monitor")
|
||||
if "nobuild" not in build_targets:
|
||||
# install dependent libraries
|
||||
if "lib_install" in self.options:
|
||||
_autoinstall_libdeps(self.cmd_ctx, [
|
||||
int(d.strip())
|
||||
for d in self.options['lib_install'].split(",")
|
||||
if d.strip()
|
||||
], self.verbose)
|
||||
if "lib_deps" in self.options:
|
||||
_autoinstall_libdeps(self.cmd_ctx,
|
||||
util.parse_conf_multi_values(
|
||||
self.options['lib_deps']),
|
||||
self.verbose)
|
||||
|
||||
try:
|
||||
p = PlatformFactory.newPlatform(self.options['platform'])
|
||||
except exception.UnknownPlatform:
|
||||
self.cmd_ctx.invoke(
|
||||
cmd_platform_install,
|
||||
platforms=[self.options['platform']],
|
||||
skip_default_package=True)
|
||||
p = PlatformFactory.newPlatform(self.options['platform'])
|
||||
|
||||
return p.run(build_vars, build_targets, self.silent, self.verbose)
|
||||
|
||||
|
||||
def _autoinstall_libdeps(ctx, libraries, verbose=False):
|
||||
if not libraries:
|
||||
return
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
ctx.obj = LibraryManager(storage_dir)
|
||||
if verbose:
|
||||
click.echo("Library Storage: " + storage_dir)
|
||||
for lib in libraries:
|
||||
try:
|
||||
ctx.invoke(cmd_lib_install, libraries=[lib], silent=not verbose)
|
||||
except exception.LibNotFound as e:
|
||||
if verbose or not is_builtin_lib(lib):
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
except exception.InternetIsOffline as e:
|
||||
click.secho(str(e), fg="yellow")
|
||||
|
||||
|
||||
def _clean_build_dir(build_dir):
|
||||
structhash_file = join(build_dir, "structure.hash")
|
||||
proj_hash = calculate_project_hash()
|
||||
|
||||
# if project's config is modified
|
||||
if (isdir(build_dir)
|
||||
and getmtime(join(util.get_project_dir(),
|
||||
"platformio.ini")) > getmtime(build_dir)):
|
||||
util.rmtree_(build_dir)
|
||||
|
||||
# check project structure
|
||||
if isdir(build_dir) and isfile(structhash_file):
|
||||
with open(structhash_file) as f:
|
||||
if f.read() == proj_hash:
|
||||
return
|
||||
util.rmtree_(build_dir)
|
||||
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
|
||||
with open(structhash_file, "w") as f:
|
||||
f.write(proj_hash)
|
||||
|
||||
|
||||
def print_header(label, is_error=False):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
width = len(click.unstyle(label))
|
||||
half_line = "=" * ((terminal_width - width - 2) / 2)
|
||||
click.echo("%s %s %s" % (half_line, label, half_line), err=is_error)
|
||||
|
||||
|
||||
def print_summary(results, start_time):
|
||||
print_header("[%s]" % click.style("SUMMARY"))
|
||||
|
||||
envname_max_len = 0
|
||||
for (envname, _) in results:
|
||||
if len(envname) > envname_max_len:
|
||||
envname_max_len = len(envname)
|
||||
|
||||
successed = True
|
||||
for (envname, status) in results:
|
||||
status_str = click.style("SUCCESS", fg="green")
|
||||
if status is False:
|
||||
successed = False
|
||||
status_str = click.style("ERROR", fg="red")
|
||||
elif status is None:
|
||||
status_str = click.style("SKIP", fg="yellow")
|
||||
|
||||
format_str = (
|
||||
"Environment {0:<" + str(envname_max_len + 9) + "}\t[{1}]")
|
||||
click.echo(
|
||||
format_str.format(click.style(envname, fg="cyan"), status_str),
|
||||
err=status is False)
|
||||
|
||||
print_header(
|
||||
"[%s] Took %.2f seconds" %
|
||||
((click.style("SUCCESS", fg="green", bold=True)
|
||||
if successed else click.style("ERROR", fg="red", bold=True)),
|
||||
time() - start_time),
|
||||
is_error=not successed)
|
||||
|
||||
|
||||
def check_project_defopts(config):
|
||||
if not config.has_section("platformio"):
|
||||
return True
|
||||
unknown = set([k for k, _ in config.items("platformio")]) - set(
|
||||
EnvironmentProcessor.KNOWN_PLATFORMIO_OPTIONS)
|
||||
if not unknown:
|
||||
return True
|
||||
click.secho(
|
||||
"Warning! Ignore unknown `%s` option in `[platformio]` section" %
|
||||
", ".join(unknown),
|
||||
fg="yellow")
|
||||
return False
|
||||
|
||||
|
||||
def check_project_envs(config, environments=None):
|
||||
if not config.sections():
|
||||
raise exception.ProjectEnvsNotAvailable()
|
||||
|
||||
known = set([s[4:] for s in config.sections() if s.startswith("env:")])
|
||||
unknown = set(environments or []) - known
|
||||
if unknown:
|
||||
raise exception.UnknownEnvNames(", ".join(unknown), ", ".join(known))
|
||||
return True
|
||||
|
||||
|
||||
def calculate_project_hash():
|
||||
check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
|
||||
structure = [__version__]
|
||||
for d in (util.get_projectsrc_dir(), util.get_projectlib_dir()):
|
||||
if not isdir(d):
|
||||
continue
|
||||
for root, _, files in walk(d):
|
||||
for f in files:
|
||||
path = join(root, f)
|
||||
if path.endswith(check_suffixes):
|
||||
structure.append(path)
|
||||
return sha1(",".join(sorted(structure))).hexdigest()
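This legacy helper is deleted by the changeset (the new run command uses `project.helpers.compute_project_checksum()` instead); as written it is Python 2 only, since `hashlib` requires bytes on Python 3. For comparison, a Python 3 safe sketch of the same recipe (name and signature are illustrative):

from hashlib import sha1

def structure_hash(version, paths):
    # Sort the version string together with the collected source paths, hash the join.
    return sha1(",".join(sorted([version] + list(paths))).encode("utf-8")).hexdigest()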
|
||||
platformio/commands/run/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio.commands.run.command import cli
|
||||
platformio/commands/run/command.py (new file, 205 lines)
@@ -0,0 +1,205 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from multiprocessing import cpu_count
|
||||
from os import getcwd
|
||||
from os.path import isfile, join
|
||||
from time import time
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.commands.device import device_monitor as cmd_device_monitor
|
||||
from platformio.commands.run.helpers import (clean_build_dir,
|
||||
handle_legacy_libdeps)
|
||||
from platformio.commands.run.processor import EnvironmentProcessor
|
||||
from platformio.commands.test.processor import CTX_META_TEST_IS_RUNNING
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import (find_project_dir_above,
|
||||
get_project_build_dir)
|
||||
|
||||
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
|
||||
|
||||
try:
|
||||
DEFAULT_JOB_NUMS = cpu_count()
|
||||
except NotImplementedError:
|
||||
DEFAULT_JOB_NUMS = 1
|
||||
|
||||
|
||||
@click.command("run", short_help="Process project environments")
|
||||
@click.option("-e", "--environment", multiple=True)
|
||||
@click.option("-t", "--target", multiple=True)
|
||||
@click.option("--upload-port")
|
||||
@click.option("-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-c",
|
||||
"--project-conf",
|
||||
type=click.Path(exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=False,
|
||||
readable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-j",
|
||||
"--jobs",
|
||||
type=int,
|
||||
default=DEFAULT_JOB_NUMS,
|
||||
help=("Allow N jobs at once. "
|
||||
"Default is a number of CPUs in a system (N=%d)" %
|
||||
DEFAULT_JOB_NUMS))
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.option("--disable-auto-clean", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli(ctx, environment, target, upload_port, project_dir, project_conf, jobs,
|
||||
silent, verbose, disable_auto_clean):
|
||||
# find project directory on upper level
|
||||
if isfile(project_dir):
|
||||
project_dir = find_project_dir_above(project_dir)
|
||||
|
||||
is_test_running = CTX_META_TEST_IS_RUNNING in ctx.meta
|
||||
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig.get_instance(
|
||||
project_conf or join(project_dir, "platformio.ini"))
|
||||
config.validate(environment)
|
||||
|
||||
# clean obsolete build dir
|
||||
if not disable_auto_clean:
|
||||
try:
|
||||
clean_build_dir(get_project_build_dir(), config)
|
||||
except: # pylint: disable=bare-except
|
||||
click.secho(
|
||||
"Can not remove temporary directory `%s`. Please remove "
|
||||
"it manually to avoid build issues" %
|
||||
get_project_build_dir(force=True),
|
||||
fg="yellow")
|
||||
|
||||
handle_legacy_libdeps(project_dir, config)
|
||||
|
||||
default_envs = config.default_envs()
|
||||
results = []
|
||||
for env in config.envs():
|
||||
skipenv = any([
|
||||
environment and env not in environment, not environment
|
||||
and default_envs and env not in default_envs
|
||||
])
|
||||
if skipenv:
|
||||
results.append({"env": env})
|
||||
continue
|
||||
|
||||
# print empty line between multi environment project
|
||||
if not silent and any(
|
||||
r.get("succeeded") is not None for r in results):
|
||||
click.echo()
|
||||
|
||||
results.append(
|
||||
process_env(ctx, env, config, environment, target, upload_port,
|
||||
silent, verbose, jobs, is_test_running))
|
||||
|
||||
command_failed = any(r.get("succeeded") is False for r in results)
|
||||
|
||||
if (not is_test_running and (command_failed or not silent)
|
||||
and len(results) > 1):
|
||||
print_processing_summary(results)
|
||||
|
||||
if command_failed:
|
||||
raise exception.ReturnErrorCode(1)
|
||||
return True
|
||||
|
||||
|
||||
def process_env(ctx, name, config, environments, targets, upload_port, silent,
|
||||
verbose, jobs, is_test_running):
|
||||
if not is_test_running and not silent:
|
||||
print_processing_header(name, config, verbose)
|
||||
|
||||
ep = EnvironmentProcessor(ctx, name, config, targets, upload_port, silent,
|
||||
verbose, jobs)
|
||||
result = {"env": name, "duration": time(), "succeeded": ep.process()}
|
||||
result['duration'] = time() - result['duration']
|
||||
|
||||
# print footer on error or when is not unit testing
|
||||
if not is_test_running and (not silent or not result['succeeded']):
|
||||
print_processing_footer(result)
|
||||
|
||||
if (result['succeeded'] and "monitor" in ep.get_build_targets()
|
||||
and "nobuild" not in ep.get_build_targets()):
|
||||
ctx.invoke(cmd_device_monitor,
|
||||
environment=environments[0] if environments else None)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def print_processing_header(env, config, verbose=False):
|
||||
env_dump = []
|
||||
for k, v in config.items(env=env):
|
||||
if verbose or k in ("platform", "framework", "board"):
|
||||
env_dump.append("%s: %s" %
|
||||
(k, ", ".join(v) if isinstance(v, list) else v))
|
||||
click.echo("Processing %s (%s)" %
|
||||
(click.style(env, fg="cyan", bold=True), "; ".join(env_dump)))
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
|
||||
def print_processing_footer(result):
|
||||
is_failed = not result.get("succeeded")
|
||||
util.print_labeled_bar(
|
||||
"[%s] Took %.2f seconds" %
|
||||
((click.style("FAILED", fg="red", bold=True) if is_failed else
|
||||
click.style("SUCCESS", fg="green", bold=True)), result['duration']),
|
||||
is_error=is_failed)
|
||||
|
||||
|
||||
def print_processing_summary(results):
|
||||
tabular_data = []
|
||||
succeeded_nums = 0
|
||||
failed_nums = 0
|
||||
duration = 0
|
||||
|
||||
for result in results:
|
||||
duration += result.get("duration", 0)
|
||||
if result.get("succeeded") is False:
|
||||
failed_nums += 1
|
||||
status_str = click.style("FAILED", fg="red")
|
||||
elif result.get("succeeded") is None:
|
||||
status_str = "IGNORED"
|
||||
else:
|
||||
succeeded_nums += 1
|
||||
status_str = click.style("SUCCESS", fg="green")
|
||||
|
||||
tabular_data.append(
|
||||
(click.style(result['env'], fg="cyan"), status_str,
|
||||
util.humanize_duration_time(result.get("duration"))))
|
||||
|
||||
click.echo()
|
||||
click.echo(tabulate(tabular_data,
|
||||
headers=[
|
||||
click.style(s, bold=True)
|
||||
for s in ("Environment", "Status", "Duration")
|
||||
]),
|
||||
err=failed_nums)
|
||||
|
||||
util.print_labeled_bar(
|
||||
"%s%d succeeded in %s" %
|
||||
("%d failed, " % failed_nums if failed_nums else "", succeeded_nums,
|
||||
util.humanize_duration_time(duration)),
|
||||
is_error=failed_nums,
|
||||
fg="red" if failed_nums else "green")
|
||||
platformio/commands/run/helpers.py (new file, 64 lines)
@@ -0,0 +1,64 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from os import makedirs
from os.path import isdir, isfile, join

import click

from platformio import fs
from platformio.project.helpers import (compute_project_checksum,
                                        get_project_dir,
                                        get_project_libdeps_dir)


def handle_legacy_libdeps(project_dir, config):
    legacy_libdeps_dir = join(project_dir, ".piolibdeps")
    if (not isdir(legacy_libdeps_dir)
            or legacy_libdeps_dir == get_project_libdeps_dir()):
        return
    if not config.has_section("env"):
        config.add_section("env")
    lib_extra_dirs = config.get("env", "lib_extra_dirs", [])
    lib_extra_dirs.append(legacy_libdeps_dir)
    config.set("env", "lib_extra_dirs", lib_extra_dirs)
    click.secho(
        "DEPRECATED! A legacy library storage `{0}` has been found in a "
        "project. \nPlease declare project dependencies in `platformio.ini`"
        " file using `lib_deps` option and remove `{0}` folder."
        "\nMore details -> http://docs.platformio.org/page/projectconf/"
        "section_env_library.html#lib-deps".format(legacy_libdeps_dir),
        fg="yellow")


def clean_build_dir(build_dir, config):
    # remove legacy ".pioenvs" folder
    legacy_build_dir = join(get_project_dir(), ".pioenvs")
    if isdir(legacy_build_dir) and legacy_build_dir != build_dir:
        fs.rmtree(legacy_build_dir)

    checksum_file = join(build_dir, "project.checksum")
    checksum = compute_project_checksum(config)

    if isdir(build_dir):
        # check project structure
        if isfile(checksum_file):
            with open(checksum_file) as f:
                if f.read() == checksum:
                    return
        fs.rmtree(build_dir)

    makedirs(build_dir)
    with open(checksum_file, "w") as f:
        f.write(checksum)
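Note: the following is an editor's sketch, not part of this commit; it isolates the checksum guard used by clean_build_dir() above. The helper name build_dir_is_stale is hypothetical.

# The build dir is wiped only when the stored "project.checksum" no longer
# matches the value computed from the current platformio.ini.
from os.path import isfile, join

def build_dir_is_stale(build_dir, checksum):  # hypothetical helper
    checksum_file = join(build_dir, "project.checksum")
    if not isfile(checksum_file):
        return True
    with open(checksum_file) as f:
        return f.read() != checksum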
platformio/commands/run/processor.py (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio import exception, telemetry
|
||||
from platformio.commands.platform import \
|
||||
platform_install as cmd_platform_install
|
||||
from platformio.commands.test.processor import CTX_META_TEST_RUNNING_NAME
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
|
||||
|
||||
class EnvironmentProcessor(object):
|
||||
|
||||
def __init__( # pylint: disable=too-many-arguments
|
||||
self, cmd_ctx, name, config, targets, upload_port, silent, verbose,
|
||||
jobs):
|
||||
self.cmd_ctx = cmd_ctx
|
||||
self.name = name
|
||||
self.config = config
|
||||
self.targets = [str(t) for t in targets]
|
||||
self.upload_port = upload_port
|
||||
self.silent = silent
|
||||
self.verbose = verbose
|
||||
self.jobs = jobs
|
||||
self.options = config.items(env=name, as_dict=True)
|
||||
|
||||
def get_build_variables(self):
|
||||
variables = {"pioenv": self.name, "project_config": self.config.path}
|
||||
|
||||
if CTX_META_TEST_RUNNING_NAME in self.cmd_ctx.meta:
|
||||
variables['piotest_running_name'] = self.cmd_ctx.meta[
|
||||
CTX_META_TEST_RUNNING_NAME]
|
||||
|
||||
if self.upload_port:
|
||||
# override the upload port with a custom one from the CLI
|
||||
variables['upload_port'] = self.upload_port
|
||||
return variables
|
||||
|
||||
def get_build_targets(self):
|
||||
if self.targets:
|
||||
return [t for t in self.targets]
|
||||
return self.config.get("env:" + self.name, "targets", [])
|
||||
|
||||
def process(self):
|
||||
if "platform" not in self.options:
|
||||
raise exception.UndefinedEnvPlatform(self.name)
|
||||
|
||||
build_vars = self.get_build_variables()
|
||||
build_targets = self.get_build_targets()
|
||||
|
||||
telemetry.on_run_environment(self.options, build_targets)
|
||||
|
||||
# skip the monitor target; it is invoked from process_env() after the build
|
||||
if "monitor" in build_targets:
|
||||
build_targets.remove("monitor")
|
||||
|
||||
try:
|
||||
p = PlatformFactory.newPlatform(self.options['platform'])
|
||||
except exception.UnknownPlatform:
|
||||
self.cmd_ctx.invoke(cmd_platform_install,
|
||||
platforms=[self.options['platform']],
|
||||
skip_default_package=True)
|
||||
p = PlatformFactory.newPlatform(self.options['platform'])
|
||||
|
||||
result = p.run(build_vars, build_targets, self.silent, self.verbose,
|
||||
self.jobs)
|
||||
return result['returncode'] == 0
|
||||
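Note: the following is an editor's sketch, not part of this commit; the values are hypothetical and only illustrate what get_build_variables() above hands to the platform run.

variables = {
    "pioenv": "uno",                     # environment name
    "project_config": "platformio.ini",  # resolved ProjectConfig path
}
upload_port = "/dev/ttyUSB0"             # hypothetical --upload-port value
if upload_port:
    # mirrors the CLI override performed in get_build_variables()
    variables["upload_port"] = upload_port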
@@ -13,8 +13,18 @@
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import app
|
||||
from platformio.compat import string_types
|
||||
|
||||
|
||||
def format_value(raw):
|
||||
if isinstance(raw, bool):
|
||||
return "Yes" if raw else "No"
|
||||
if isinstance(raw, string_types):
|
||||
return raw
|
||||
return str(raw)
|
||||
|
||||
|
||||
@click.group(short_help="Manage PlatformIO settings")
|
||||
@@ -25,41 +35,27 @@ def cli():
|
||||
@cli.command("get", short_help="Get existing setting/-s")
|
||||
@click.argument("name", required=False)
|
||||
def settings_get(name):
|
||||
tabular_data = []
|
||||
for key, options in sorted(app.DEFAULT_SETTINGS.items()):
|
||||
if name and name != key:
|
||||
continue
|
||||
raw_value = app.get_setting(key)
|
||||
formatted_value = format_value(raw_value)
|
||||
|
||||
list_tpl = "{name:<40} {value:<35} {description}"
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
if raw_value != options['value']:
|
||||
default_formatted_value = format_value(options['value'])
|
||||
formatted_value += "%s" % (
|
||||
"\n" if len(default_formatted_value) > 10 else " ")
|
||||
formatted_value += "[%s]" % click.style(default_formatted_value,
|
||||
fg="yellow")
|
||||
|
||||
tabular_data.append(
|
||||
(click.style(key,
|
||||
fg="cyan"), formatted_value, options['description']))
|
||||
|
||||
click.echo(
|
||||
list_tpl.format(
|
||||
name=click.style("Name", fg="cyan"),
|
||||
value=(click.style("Value", fg="green") +
|
||||
click.style(" [Default]", fg="yellow")),
|
||||
description="Description"))
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
for _name, _data in sorted(app.DEFAULT_SETTINGS.items()):
|
||||
if name and name != _name:
|
||||
continue
|
||||
_value = app.get_setting(_name)
|
||||
|
||||
_value_str = str(_value)
|
||||
if isinstance(_value, bool):
|
||||
_value_str = "Yes" if _value else "No"
|
||||
_value_str = click.style(_value_str, fg="green")
|
||||
|
||||
if _value != _data['value']:
|
||||
_defvalue_str = str(_data['value'])
|
||||
if isinstance(_data['value'], bool):
|
||||
_defvalue_str = "Yes" if _data['value'] else "No"
|
||||
_value_str += click.style(" [%s]" % _defvalue_str, fg="yellow")
|
||||
else:
|
||||
_value_str += click.style(" ", fg="yellow")
|
||||
|
||||
click.echo(
|
||||
list_tpl.format(
|
||||
name=click.style(_name, fg="cyan"),
|
||||
value=_value_str,
|
||||
description=_data['description']))
|
||||
tabulate(tabular_data,
|
||||
headers=["Name", "Current value [Default]", "Description"]))
|
||||
|
||||
|
||||
@cli.command("set", short_help="Set new value for the setting")
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
from os import getcwd
|
||||
|
||||
import click
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.command("test", short_help="Local Unit Testing")
|
||||
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
|
||||
@click.option(
|
||||
"--filter",
|
||||
"-f",
|
||||
multiple=True,
|
||||
metavar="<pattern>",
|
||||
help="Filter tests by a pattern")
|
||||
@click.option(
|
||||
"--ignore",
|
||||
"-i",
|
||||
multiple=True,
|
||||
metavar="<pattern>",
|
||||
help="Ignore tests by a pattern")
|
||||
@click.option("--upload-port")
|
||||
@click.option("--test-port")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--without-building", is_flag=True)
|
||||
@click.option("--without-uploading", is_flag=True)
|
||||
@click.option(
|
||||
"--no-reset",
|
||||
is_flag=True,
|
||||
help="Disable software reset via Serial.DTR/RST")
|
||||
@click.option(
|
||||
"--monitor-rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state for Serial Monitor")
|
||||
@click.option(
|
||||
"--monitor-dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state for Serial Monitor")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
|
||||
platformio/commands/test/__init__.py (new file, 15 lines)
@@ -0,0 +1,15 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.commands.test.command import cli
platformio/commands/test/command.py (new file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-locals, too-many-branches
|
||||
|
||||
from fnmatch import fnmatch
|
||||
from os import getcwd, listdir
|
||||
from os.path import isdir, join
|
||||
from time import time
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.commands.test.embedded import EmbeddedTestProcessor
|
||||
from platformio.commands.test.native import NativeTestProcessor
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import get_project_test_dir
|
||||
|
||||
|
||||
@click.command("test", short_help="Unit Testing")
|
||||
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
|
||||
@click.option("--filter",
|
||||
"-f",
|
||||
multiple=True,
|
||||
metavar="<pattern>",
|
||||
help="Filter tests by a pattern")
|
||||
@click.option("--ignore",
|
||||
"-i",
|
||||
multiple=True,
|
||||
metavar="<pattern>",
|
||||
help="Ignore tests by a pattern")
|
||||
@click.option("--upload-port")
|
||||
@click.option("--test-port")
|
||||
@click.option("-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("-c",
|
||||
"--project-conf",
|
||||
type=click.Path(exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=False,
|
||||
readable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--without-building", is_flag=True)
|
||||
@click.option("--without-uploading", is_flag=True)
|
||||
@click.option("--without-testing", is_flag=True)
|
||||
@click.option("--no-reset", is_flag=True)
|
||||
@click.option("--monitor-rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state for Serial Monitor")
|
||||
@click.option("--monitor-dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state for Serial Monitor")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=redefined-builtin
|
||||
ctx, environment, ignore, filter, upload_port, test_port, project_dir,
|
||||
project_conf, without_building, without_uploading, without_testing,
|
||||
no_reset, monitor_rts, monitor_dtr, verbose):
|
||||
with fs.cd(project_dir):
|
||||
test_dir = get_project_test_dir()
|
||||
if not isdir(test_dir):
|
||||
raise exception.TestDirNotExists(test_dir)
|
||||
test_names = get_test_names(test_dir)
|
||||
|
||||
config = ProjectConfig.get_instance(
|
||||
project_conf or join(project_dir, "platformio.ini"))
|
||||
config.validate(envs=environment)
|
||||
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
click.secho("Collected %d items" % len(test_names), bold=True)
|
||||
|
||||
results = []
|
||||
default_envs = config.default_envs()
|
||||
for testname in test_names:
|
||||
|
||||
for envname in config.envs():
|
||||
section = "env:%s" % envname
|
||||
|
||||
# filter and ignore patterns
|
||||
patterns = dict(filter=list(filter), ignore=list(ignore))
|
||||
for key in patterns:
|
||||
patterns[key].extend(
|
||||
config.get(section, "test_%s" % key, []))
|
||||
|
||||
skip_conditions = [
|
||||
environment and envname not in environment,
|
||||
not environment and default_envs
|
||||
and envname not in default_envs,
|
||||
testname != "*" and patterns['filter'] and
|
||||
not any([fnmatch(testname, p)
|
||||
for p in patterns['filter']]),
|
||||
testname != "*"
|
||||
and any([fnmatch(testname, p)
|
||||
for p in patterns['ignore']]),
|
||||
]
|
||||
if any(skip_conditions):
|
||||
results.append({"env": envname, "test": testname})
|
||||
continue
|
||||
|
||||
click.echo()
|
||||
print_processing_header(testname, envname)
|
||||
|
||||
cls = (NativeTestProcessor
|
||||
if config.get(section, "platform") == "native" else
|
||||
EmbeddedTestProcessor)
|
||||
tp = cls(
|
||||
ctx, testname, envname,
|
||||
dict(project_config=config,
|
||||
project_dir=project_dir,
|
||||
upload_port=upload_port,
|
||||
test_port=test_port,
|
||||
without_building=without_building,
|
||||
without_uploading=without_uploading,
|
||||
without_testing=without_testing,
|
||||
no_reset=no_reset,
|
||||
monitor_rts=monitor_rts,
|
||||
monitor_dtr=monitor_dtr,
|
||||
verbose=verbose))
|
||||
result = {
|
||||
"env": envname,
|
||||
"test": testname,
|
||||
"duration": time(),
|
||||
"succeeded": tp.process()
|
||||
}
|
||||
result['duration'] = time() - result['duration']
|
||||
results.append(result)
|
||||
|
||||
print_processing_footer(result)
|
||||
|
||||
if without_testing:
|
||||
return
|
||||
|
||||
print_testing_summary(results)
|
||||
|
||||
command_failed = any(r.get("succeeded") is False for r in results)
|
||||
if command_failed:
|
||||
raise exception.ReturnErrorCode(1)
|
||||
|
||||
|
||||
def get_test_names(test_dir):
|
||||
names = []
|
||||
for item in sorted(listdir(test_dir)):
|
||||
if isdir(join(test_dir, item)):
|
||||
names.append(item)
|
||||
if not names:
|
||||
names = ["*"]
|
||||
return names
|
||||
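Note: the following is an editor's sketch, not part of this commit; the directory names are hypothetical and only illustrate what get_test_names() above returns.

#   test/
#     test_calc/
#     test_sensor/
names = get_test_names("test")   # -> ["test_calc", "test_sensor"]
# With no sub-directories it falls back to ["*"], so a flat test suite
# in test/ still runs as a single item.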
|
||||
|
||||
def print_processing_header(test, env):
|
||||
click.echo("Processing %s in %s environment" % (click.style(
|
||||
test, fg="yellow", bold=True), click.style(env, fg="cyan", bold=True)))
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
|
||||
def print_processing_footer(result):
|
||||
is_failed = not result.get("succeeded")
|
||||
util.print_labeled_bar(
|
||||
"[%s] Took %.2f seconds" %
|
||||
((click.style("FAILED", fg="red", bold=True) if is_failed else
|
||||
click.style("PASSED", fg="green", bold=True)), result['duration']),
|
||||
is_error=is_failed)
|
||||
|
||||
|
||||
def print_testing_summary(results):
|
||||
click.echo()
|
||||
|
||||
tabular_data = []
|
||||
succeeded_nums = 0
|
||||
failed_nums = 0
|
||||
duration = 0
|
||||
|
||||
for result in results:
|
||||
duration += result.get("duration", 0)
|
||||
if result.get("succeeded") is False:
|
||||
failed_nums += 1
|
||||
status_str = click.style("FAILED", fg="red")
|
||||
elif result.get("succeeded") is None:
|
||||
status_str = "IGNORED"
|
||||
else:
|
||||
succeeded_nums += 1
|
||||
status_str = click.style("PASSED", fg="green")
|
||||
|
||||
tabular_data.append(
|
||||
(result['test'], click.style(result['env'], fg="cyan"), status_str,
|
||||
util.humanize_duration_time(result.get("duration"))))
|
||||
|
||||
click.echo(tabulate(tabular_data,
|
||||
headers=[
|
||||
click.style(s, bold=True)
|
||||
for s in ("Test", "Environment", "Status",
|
||||
"Duration")
|
||||
]),
|
||||
err=failed_nums)
|
||||
|
||||
util.print_labeled_bar(
|
||||
"%s%d succeeded in %s" %
|
||||
("%d failed, " % failed_nums if failed_nums else "", succeeded_nums,
|
||||
util.humanize_duration_time(duration)),
|
||||
is_error=failed_nums,
|
||||
fg="red" if failed_nums else "green")
|
||||
platformio/commands/test/embedded.py (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from time import sleep
|
||||
|
||||
import click
|
||||
import serial
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.commands.test.processor import TestProcessorBase
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
|
||||
|
||||
class EmbeddedTestProcessor(TestProcessorBase):
|
||||
|
||||
SERIAL_TIMEOUT = 600
|
||||
|
||||
def process(self):
|
||||
if not self.options['without_building']:
|
||||
self.print_progress("Building...")
|
||||
target = ["__test"]
|
||||
if self.options['without_uploading']:
|
||||
target.append("checkprogsize")
|
||||
if not self.build_or_upload(target):
|
||||
return False
|
||||
|
||||
if not self.options['without_uploading']:
|
||||
self.print_progress("Uploading...")
|
||||
target = ["upload"]
|
||||
if self.options['without_building']:
|
||||
target.append("nobuild")
|
||||
else:
|
||||
target.append("__test")
|
||||
if not self.build_or_upload(target):
|
||||
return False
|
||||
|
||||
if self.options['without_testing']:
|
||||
return None
|
||||
|
||||
self.print_progress("Testing...")
|
||||
return self.run()
|
||||
|
||||
def run(self):
|
||||
click.echo("If you don't see any output for the first 10 secs, "
|
||||
"please reset board (press reset button)")
|
||||
click.echo()
|
||||
|
||||
try:
|
||||
ser = serial.Serial(baudrate=self.get_baudrate(),
|
||||
timeout=self.SERIAL_TIMEOUT)
|
||||
ser.port = self.get_test_port()
|
||||
ser.rts = self.options['monitor_rts']
|
||||
ser.dtr = self.options['monitor_dtr']
|
||||
ser.open()
|
||||
except serial.SerialException as e:
|
||||
click.secho(str(e), fg="red", err=True)
|
||||
return False
|
||||
|
||||
if not self.options['no_reset']:
|
||||
ser.flushInput()
|
||||
ser.setDTR(False)
|
||||
ser.setRTS(False)
|
||||
sleep(0.1)
|
||||
ser.setDTR(True)
|
||||
ser.setRTS(True)
|
||||
sleep(0.1)
|
||||
|
||||
while True:
|
||||
line = ser.readline().strip()
|
||||
|
||||
# fix non-ascii output from device
|
||||
for i, c in enumerate(line[::-1]):
|
||||
if not isinstance(c, int):
|
||||
c = ord(c)
|
||||
if c > 127:
|
||||
line = line[-i:]
|
||||
break
|
||||
|
||||
if not line:
|
||||
continue
|
||||
if isinstance(line, bytes):
|
||||
line = line.decode("utf8")
|
||||
self.on_run_out(line)
|
||||
if all([l in line for l in ("Tests", "Failures", "Ignored")]):
|
||||
break
|
||||
ser.close()
|
||||
return not self._run_failed
|
||||
|
||||
def get_test_port(self):
|
||||
# if test port is specified manually or in config
|
||||
if self.options.get("test_port"):
|
||||
return self.options.get("test_port")
|
||||
if self.env_options.get("test_port"):
|
||||
return self.env_options.get("test_port")
|
||||
|
||||
assert set(["platform", "board"]) & set(self.env_options.keys())
|
||||
p = PlatformFactory.newPlatform(self.env_options['platform'])
|
||||
board_hwids = p.board_config(self.env_options['board']).get(
|
||||
"build.hwids", [])
|
||||
port = None
|
||||
elapsed = 0
|
||||
while elapsed < 5 and not port:
|
||||
for item in util.get_serialports():
|
||||
port = item['port']
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item['hwid']:
|
||||
return port
|
||||
|
||||
# check if port is already configured
|
||||
try:
|
||||
serial.Serial(port, timeout=self.SERIAL_TIMEOUT).close()
|
||||
except serial.SerialException:
|
||||
port = None
|
||||
|
||||
if not port:
|
||||
sleep(0.25)
|
||||
elapsed += 0.25
|
||||
|
||||
if not port:
|
||||
raise exception.PlatformioException(
|
||||
"Please specify `test_port` for environment or use "
|
||||
"global `--test-port` option.")
|
||||
return port
|
||||
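Note: the following is an editor's sketch, not part of this commit; the VID:PID pair and serial-port hwid string are hypothetical and only illustrate the match performed in get_test_port() above.

hwid = ("0x2341", "0x0043")                       # from board "build.hwids"
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
assert hwid_str == "2341:0043"
assert hwid_str in "USB VID:PID=2341:0043 SER=95530343434351A0D1E1"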
platformio/commands/test/native.py (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from os.path import join
|
||||
|
||||
from platformio import fs, proc
|
||||
from platformio.commands.test.processor import TestProcessorBase
|
||||
from platformio.proc import LineBufferedAsyncPipe
|
||||
from platformio.project.helpers import get_project_build_dir
|
||||
|
||||
|
||||
class NativeTestProcessor(TestProcessorBase):
|
||||
|
||||
def process(self):
|
||||
if not self.options['without_building']:
|
||||
self.print_progress("Building...")
|
||||
if not self.build_or_upload(["__test"]):
|
||||
return False
|
||||
if self.options['without_testing']:
|
||||
return None
|
||||
self.print_progress("Testing...")
|
||||
return self.run()
|
||||
|
||||
def run(self):
|
||||
with fs.cd(self.options['project_dir']):
|
||||
build_dir = get_project_build_dir()
|
||||
result = proc.exec_command(
|
||||
[join(build_dir, self.env_name, "program")],
|
||||
stdout=LineBufferedAsyncPipe(self.on_run_out),
|
||||
stderr=LineBufferedAsyncPipe(self.on_run_out))
|
||||
assert "returncode" in result
|
||||
return result['returncode'] == 0 and not self._run_failed
|
||||
platformio/commands/test/processor.py (new file, 194 lines)
@@ -0,0 +1,194 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import atexit
|
||||
from os import remove
|
||||
from os.path import isdir, isfile, join
|
||||
from string import Template
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception
|
||||
from platformio.project.helpers import get_project_test_dir
|
||||
|
||||
TRANSPORT_OPTIONS = {
|
||||
"arduino": {
|
||||
"include": "#include <Arduino.h>",
|
||||
"object": "",
|
||||
"putchar": "Serial.write(c)",
|
||||
"flush": "Serial.flush()",
|
||||
"begin": "Serial.begin($baudrate)",
|
||||
"end": "Serial.end()"
|
||||
},
|
||||
"mbed": {
|
||||
"include": "#include <mbed.h>",
|
||||
"object": "Serial pc(USBTX, USBRX);",
|
||||
"putchar": "pc.putc(c)",
|
||||
"flush": "",
|
||||
"begin": "pc.baud($baudrate)",
|
||||
"end": ""
|
||||
},
|
||||
"espidf": {
|
||||
"include": "#include <stdio.h>",
|
||||
"object": "",
|
||||
"putchar": "putchar(c)",
|
||||
"flush": "fflush(stdout)",
|
||||
"begin": "",
|
||||
"end": ""
|
||||
},
|
||||
"native": {
|
||||
"include": "#include <stdio.h>",
|
||||
"object": "",
|
||||
"putchar": "putchar(c)",
|
||||
"flush": "fflush(stdout)",
|
||||
"begin": "",
|
||||
"end": ""
|
||||
},
|
||||
"custom": {
|
||||
"include": '#include "unittest_transport.h"',
|
||||
"object": "",
|
||||
"putchar": "unittest_uart_putchar(c)",
|
||||
"flush": "unittest_uart_flush()",
|
||||
"begin": "unittest_uart_begin()",
|
||||
"end": "unittest_uart_end()"
|
||||
}
|
||||
}
|
||||
|
||||
CTX_META_TEST_IS_RUNNING = __name__ + ".test_running"
|
||||
CTX_META_TEST_RUNNING_NAME = __name__ + ".test_running_name"
|
||||
|
||||
|
||||
class TestProcessorBase(object):
|
||||
|
||||
DEFAULT_BAUDRATE = 115200
|
||||
|
||||
def __init__(self, cmd_ctx, testname, envname, options):
|
||||
self.cmd_ctx = cmd_ctx
|
||||
self.cmd_ctx.meta[CTX_META_TEST_IS_RUNNING] = True
|
||||
self.test_name = testname
|
||||
self.options = options
|
||||
self.env_name = envname
|
||||
self.env_options = options['project_config'].items(env=envname,
|
||||
as_dict=True)
|
||||
self._run_failed = False
|
||||
self._outputcpp_generated = False
|
||||
|
||||
def get_transport(self):
|
||||
if self.env_options.get("platform") == "native":
|
||||
transport = "native"
|
||||
elif "framework" in self.env_options:
|
||||
transport = self.env_options.get("framework")[0]
|
||||
if "test_transport" in self.env_options:
|
||||
transport = self.env_options['test_transport']
|
||||
if transport not in TRANSPORT_OPTIONS:
|
||||
raise exception.PlatformioException(
|
||||
"Unknown Unit Test transport `%s`" % transport)
|
||||
return transport.lower()
|
||||
|
||||
def get_baudrate(self):
|
||||
return int(self.env_options.get("test_speed", self.DEFAULT_BAUDRATE))
|
||||
|
||||
def print_progress(self, text):
|
||||
click.secho(text, bold=self.options.get("verbose"))
|
||||
|
||||
def build_or_upload(self, target):
|
||||
if not self._outputcpp_generated:
|
||||
self.generate_outputcpp(get_project_test_dir())
|
||||
self._outputcpp_generated = True
|
||||
|
||||
if self.test_name != "*":
|
||||
self.cmd_ctx.meta[CTX_META_TEST_RUNNING_NAME] = self.test_name
|
||||
|
||||
try:
|
||||
from platformio.commands.run import cli as cmd_run
|
||||
return self.cmd_ctx.invoke(cmd_run,
|
||||
project_dir=self.options['project_dir'],
|
||||
upload_port=self.options['upload_port'],
|
||||
silent=not self.options['verbose'],
|
||||
environment=[self.env_name],
|
||||
disable_auto_clean="nobuild" in target,
|
||||
target=target)
|
||||
except exception.ReturnErrorCode:
|
||||
return False
|
||||
|
||||
def process(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def run(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def on_run_out(self, line):
|
||||
line = line.strip()
|
||||
if line.endswith(":PASS"):
|
||||
click.echo("%s\t[%s]" %
|
||||
(line[:-5], click.style("PASSED", fg="green")))
|
||||
elif ":FAIL" in line:
|
||||
self._run_failed = True
|
||||
click.echo("%s\t[%s]" % (line, click.style("FAILED", fg="red")))
|
||||
else:
|
||||
click.echo(line)
|
||||
|
||||
def generate_outputcpp(self, test_dir):
|
||||
assert isdir(test_dir)
|
||||
|
||||
cpp_tpl = "\n".join([
|
||||
"$include",
|
||||
"#include <output_export.h>",
|
||||
"",
|
||||
"$object",
|
||||
"",
|
||||
"#ifdef __GNUC__",
|
||||
"void output_start(unsigned int baudrate __attribute__((unused)))",
|
||||
"#else",
|
||||
"void output_start(unsigned int baudrate)",
|
||||
"#endif",
|
||||
"{",
|
||||
" $begin;",
|
||||
"}",
|
||||
"",
|
||||
"void output_char(int c)",
|
||||
"{",
|
||||
" $putchar;",
|
||||
"}",
|
||||
"",
|
||||
"void output_flush(void)",
|
||||
"{",
|
||||
" $flush;",
|
||||
"}",
|
||||
"",
|
||||
"void output_complete(void)",
|
||||
"{",
|
||||
" $end;",
|
||||
"}"
|
||||
]) # yapf: disable
|
||||
|
||||
def delete_tmptest_file(file_):
|
||||
try:
|
||||
remove(file_)
|
||||
except: # pylint: disable=bare-except
|
||||
if isfile(file_):
|
||||
click.secho(
|
||||
"Warning: Could not remove temporary file '%s'. "
|
||||
"Please remove it manually." % file_,
|
||||
fg="yellow")
|
||||
|
||||
tpl = Template(cpp_tpl).substitute(
|
||||
TRANSPORT_OPTIONS[self.get_transport()])
|
||||
data = Template(tpl).substitute(baudrate=self.get_baudrate())
|
||||
|
||||
tmp_file = join(test_dir, "output_export.cpp")
|
||||
with open(tmp_file, "w") as f:
|
||||
f.write(data)
|
||||
|
||||
atexit.register(delete_tmptest_file, tmp_file)
|
||||
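Note: the following is an editor's sketch, not part of this commit; it condenses the two-pass string.Template substitution used by generate_outputcpp() above into a minimal template (mini_tpl is hypothetical).

from string import Template

from platformio.commands.test.processor import TRANSPORT_OPTIONS  # module added above

# First pass injects the transport snippets; the second resolves $baudrate,
# which is intentionally left inside the "begin" snippet.
mini_tpl = "$include\nvoid output_start(unsigned int baudrate) { $begin; }"
stage1 = Template(mini_tpl).substitute(TRANSPORT_OPTIONS["arduino"])
print(Template(stage1).substitute(baudrate=115200))
# -> #include <Arduino.h>
#    void output_start(unsigned int baudrate) { Serial.begin(115200); }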
@@ -15,31 +15,37 @@
|
||||
import click
|
||||
|
||||
from platformio import app
|
||||
from platformio.commands.lib import CTX_META_STORAGE_DIRS_KEY
|
||||
from platformio.commands.lib import lib_update as cmd_lib_update
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.managers.core import update_core_packages
|
||||
from platformio.managers.lib import LibraryManager
|
||||
|
||||
|
||||
@click.command(
|
||||
"update", short_help="Update installed platforms, packages and libraries")
|
||||
@click.option(
|
||||
"--core-packages", is_flag=True, help="Update only the core packages")
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for new version")
|
||||
@click.command("update",
|
||||
short_help="Update installed platforms, packages and libraries")
|
||||
@click.option("--core-packages",
|
||||
is_flag=True,
|
||||
help="Update only the core packages")
|
||||
@click.option("-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="DEPRECATED. Please use `--dry-run` instead")
|
||||
@click.option("--dry-run",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for the new versions")
|
||||
@click.pass_context
|
||||
def cli(ctx, core_packages, only_check):
|
||||
def cli(ctx, core_packages, only_check, dry_run):
|
||||
# cleanup lib search results, cached board and platform lists
|
||||
app.clean_cache()
|
||||
|
||||
only_check = dry_run or only_check
|
||||
|
||||
update_core_packages(only_check)
|
||||
|
||||
if core_packages:
|
||||
return
|
||||
|
||||
# cleanup lib search results, cached board and platform lists
|
||||
app.clean_cache()
|
||||
|
||||
click.echo()
|
||||
click.echo("Platform Manager")
|
||||
click.echo("================")
|
||||
@@ -48,5 +54,5 @@ def cli(ctx, core_packages, only_check):
|
||||
click.echo()
|
||||
click.echo("Library Manager")
|
||||
click.echo("===============")
|
||||
ctx.obj = LibraryManager()
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = [LibraryManager().package_dir]
|
||||
ctx.invoke(cmd_lib_update, only_check=only_check)
|
||||
|
||||
@@ -20,11 +20,13 @@ import click
|
||||
import requests
|
||||
|
||||
from platformio import VERSION, __version__, exception, util
|
||||
from platformio.commands.home import shutdown_servers
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.proc import exec_command, get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
@click.command(
|
||||
"upgrade", short_help="Upgrade PlatformIO to the latest version")
|
||||
@click.command("upgrade",
|
||||
short_help="Upgrade PlatformIO to the latest version")
|
||||
@click.option("--dev", is_flag=True, help="Use development branch")
|
||||
def cli(dev):
|
||||
if not dev and __version__ == get_latest_version():
|
||||
@@ -35,43 +37,38 @@ def cli(dev):
|
||||
|
||||
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
|
||||
|
||||
# kill all PIO Home servers, they block `pioplus` binary
|
||||
shutdown_servers()
|
||||
|
||||
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
|
||||
cmds = (["pip", "install", "--upgrade",
|
||||
get_pip_package(to_develop)], ["platformio", "--version"])
|
||||
|
||||
cmd = None
|
||||
r = None
|
||||
r = {}
|
||||
try:
|
||||
for cmd in cmds:
|
||||
cmd = [util.get_pythonexe_path(), "-m"] + cmd
|
||||
r = None
|
||||
r = util.exec_command(cmd)
|
||||
cmd = [get_pythonexe_path(), "-m"] + cmd
|
||||
r = exec_command(cmd)
|
||||
|
||||
# try pip with disabled cache
|
||||
if r['returncode'] != 0 and cmd[2] == "pip":
|
||||
cmd.insert(3, "--no-cache-dir")
|
||||
r = util.exec_command(cmd)
|
||||
r = exec_command(cmd)
|
||||
|
||||
assert r['returncode'] == 0
|
||||
assert "version" in r['out']
|
||||
actual_version = r['out'].strip().split("version", 1)[1].strip()
|
||||
click.secho(
|
||||
"PlatformIO has been successfully upgraded to %s" % actual_version,
|
||||
fg="green")
|
||||
click.secho("PlatformIO has been successfully upgraded to %s" %
|
||||
actual_version,
|
||||
fg="green")
|
||||
click.echo("Release notes: ", nl=False)
|
||||
click.secho(
|
||||
"http://docs.platformio.org/en/latest/history.html", fg="cyan")
|
||||
click.secho("https://docs.platformio.org/en/latest/history.html",
|
||||
fg="cyan")
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
if not r:
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
|
||||
permission_errors = ("permission denied", "not permitted")
|
||||
if (any(m in r['err'].lower() for m in permission_errors)
|
||||
and "windows" not in util.get_systype()):
|
||||
click.secho(
|
||||
"""
|
||||
and not WINDOWS):
|
||||
click.secho("""
|
||||
-----------------
|
||||
Permission denied
|
||||
-----------------
|
||||
@@ -81,12 +78,10 @@ You need the `sudo` permission to install Python packages. Try
|
||||
|
||||
WARNING! Don't use `sudo` for the rest of the PlatformIO commands.
|
||||
""",
|
||||
fg="yellow",
|
||||
err=True)
|
||||
fg="yellow",
|
||||
err=True)
|
||||
raise exception.ReturnErrorCode(1)
|
||||
else:
|
||||
raise exception.UpgradeError("\n".join(
|
||||
[str(cmd), r['out'], r['err']]))
|
||||
raise exception.UpgradeError("\n".join([str(cmd), r['out'], r['err']]))
|
||||
|
||||
return True
|
||||
|
||||
@@ -96,14 +91,15 @@ def get_pip_package(to_develop):
|
||||
return "platformio"
|
||||
dl_url = ("https://github.com/platformio/"
|
||||
"platformio-core/archive/develop.zip")
|
||||
cache_dir = util.get_cache_dir()
|
||||
cache_dir = get_project_cache_dir()
|
||||
if not os.path.isdir(cache_dir):
|
||||
os.makedirs(cache_dir)
|
||||
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
|
||||
try:
|
||||
with open(pkg_name, "w") as fp:
|
||||
r = util.exec_command(
|
||||
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True)
|
||||
r = exec_command(["curl", "-fsSL", dl_url],
|
||||
stdout=fp,
|
||||
universal_newlines=True)
|
||||
assert r['returncode'] == 0
|
||||
# check ZIP structure
|
||||
with ZipFile(pkg_name) as zp:
|
||||
@@ -149,8 +145,7 @@ def get_develop_latest_version():
|
||||
|
||||
|
||||
def get_pypi_latest_version():
|
||||
r = requests.get(
|
||||
"https://pypi.python.org/pypi/platformio/json",
|
||||
headers=util.get_request_defheaders())
|
||||
r = requests.get("https://pypi.org/pypi/platformio/json",
|
||||
headers=util.get_request_defheaders())
|
||||
r.raise_for_status()
|
||||
return r.json()['info']['version']
|
||||
|
||||
platformio/compat.py (new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-import
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
CYGWIN = sys.platform.startswith('cygwin')
|
||||
WINDOWS = sys.platform.startswith('win')
|
||||
|
||||
|
||||
def get_filesystem_encoding():
|
||||
return sys.getfilesystemencoding() or sys.getdefaultencoding()
|
||||
|
||||
|
||||
if PY2:
|
||||
# pylint: disable=undefined-variable
|
||||
string_types = (str, unicode)
|
||||
|
||||
def is_bytes(x):
|
||||
return isinstance(x, (buffer, bytearray))
|
||||
|
||||
def path_to_unicode(path):
|
||||
if isinstance(path, unicode):
|
||||
return path
|
||||
return path.decode(get_filesystem_encoding()).encode("utf-8")
|
||||
|
||||
def get_file_contents(path):
|
||||
with open(path) as f:
|
||||
return f.read()
|
||||
|
||||
def hashlib_encode_data(data):
|
||||
if is_bytes(data):
|
||||
return data
|
||||
if isinstance(data, unicode):
|
||||
data = data.encode(get_filesystem_encoding())
|
||||
elif not isinstance(data, string_types):
|
||||
data = str(data)
|
||||
return data
|
||||
|
||||
def dump_json_to_unicode(obj):
|
||||
if isinstance(obj, unicode):
|
||||
return obj
|
||||
return json.dumps(obj,
|
||||
encoding=get_filesystem_encoding(),
|
||||
ensure_ascii=False,
|
||||
sort_keys=True).encode("utf8")
|
||||
|
||||
_magic_check = re.compile('([*?[])')
|
||||
_magic_check_bytes = re.compile(b'([*?[])')
|
||||
|
||||
def glob_escape(pathname):
|
||||
"""Escape all special characters."""
|
||||
# https://github.com/python/cpython/blob/master/Lib/glob.py#L161
|
||||
# Escaping is done by wrapping any of "*?[" between square brackets.
|
||||
# Metacharacters do not work in the drive part and shouldn't be
|
||||
# escaped.
|
||||
drive, pathname = os.path.splitdrive(pathname)
|
||||
if isinstance(pathname, bytes):
|
||||
pathname = _magic_check_bytes.sub(br'[\1]', pathname)
|
||||
else:
|
||||
pathname = _magic_check.sub(r'[\1]', pathname)
|
||||
return drive + pathname
|
||||
else:
|
||||
from glob import escape as glob_escape # pylint: disable=no-name-in-module
|
||||
|
||||
string_types = (str, )
|
||||
|
||||
def is_bytes(x):
|
||||
return isinstance(x, (bytes, memoryview, bytearray))
|
||||
|
||||
def path_to_unicode(path):
|
||||
return path
|
||||
|
||||
def get_file_contents(path):
|
||||
try:
|
||||
with open(path) as f:
|
||||
return f.read()
|
||||
except UnicodeDecodeError:
|
||||
with open(path, encoding="latin-1") as f:
|
||||
return f.read()
|
||||
|
||||
def hashlib_encode_data(data):
|
||||
if is_bytes(data):
|
||||
return data
|
||||
if not isinstance(data, string_types):
|
||||
data = str(data)
|
||||
return data.encode()
|
||||
|
||||
def dump_json_to_unicode(obj):
|
||||
if isinstance(obj, string_types):
|
||||
return obj
|
||||
return json.dumps(obj, ensure_ascii=False, sort_keys=True)
|
||||
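Note: the following is an editor's sketch, not part of this commit; it only illustrates how the Py2/Py3 helpers defined above are meant to be used (the "esp32dev" value is hypothetical).

import hashlib

from platformio.compat import WINDOWS, hashlib_encode_data

# hashlib_encode_data() normalizes str/bytes/non-string values so hashing
# code stays version-agnostic.
digest = hashlib.sha1(hashlib_encode_data("esp32dev")).hexdigest()
line_ending = "\r\n" if WINDOWS else "\n"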
@@ -15,15 +15,16 @@
|
||||
from email.utils import parsedate_tz
|
||||
from math import ceil
|
||||
from os.path import getsize, join
|
||||
from sys import getfilesystemencoding, version_info
|
||||
from sys import version_info
|
||||
from time import mktime
|
||||
|
||||
import click
|
||||
import requests
|
||||
|
||||
from platformio import app, util
|
||||
from platformio import util
|
||||
from platformio.exception import (FDSHASumMismatch, FDSizeMismatch,
|
||||
FDUnrecognizedStatusCode)
|
||||
from platformio.proc import exec_command
|
||||
|
||||
|
||||
class FileDownloader(object):
|
||||
@@ -33,28 +34,23 @@ class FileDownloader(object):
|
||||
def __init__(self, url, dest_dir=None):
|
||||
self._request = None
|
||||
# make connection
|
||||
self._request = requests.get(
|
||||
url,
|
||||
stream=True,
|
||||
headers=util.get_request_defheaders(),
|
||||
verify=version_info >= (2, 7, 9))
|
||||
self._request = requests.get(url,
|
||||
stream=True,
|
||||
headers=util.get_request_defheaders(),
|
||||
verify=version_info >= (2, 7, 9))
|
||||
if self._request.status_code != 200:
|
||||
raise FDUnrecognizedStatusCode(self._request.status_code, url)
|
||||
|
||||
disposition = self._request.headers.get("content-disposition")
|
||||
if disposition and "filename=" in disposition:
|
||||
self._fname = disposition[
|
||||
disposition.index("filename=") + 9:].replace('"', "").replace(
|
||||
"'", "")
|
||||
self._fname = self._fname.encode("utf8")
|
||||
self._fname = disposition[disposition.index("filename=") +
|
||||
9:].replace('"', "").replace("'", "")
|
||||
else:
|
||||
self._fname = [p for p in url.split("/") if p][-1]
|
||||
|
||||
self._progressbar = None
|
||||
self._fname = str(self._fname)
|
||||
self._destination = self._fname
|
||||
if dest_dir:
|
||||
self.set_destination(
|
||||
join(dest_dir.decode(getfilesystemencoding()), self._fname))
|
||||
self.set_destination(join(dest_dir, self._fname))
|
||||
|
||||
def set_destination(self, destination):
|
||||
self._destination = destination
|
||||
@@ -70,12 +66,12 @@ class FileDownloader(object):
|
||||
return -1
|
||||
return int(self._request.headers['content-length'])
|
||||
|
||||
def start(self):
|
||||
def start(self, with_progress=True):
|
||||
label = "Downloading"
|
||||
itercontent = self._request.iter_content(chunk_size=self.CHUNK_SIZE)
|
||||
f = open(self._destination, "wb")
|
||||
try:
|
||||
if app.is_disabled_progressbar() or self.get_size() == -1:
|
||||
if not with_progress or self.get_size() == -1:
|
||||
click.echo("%s..." % label)
|
||||
for chunk in itercontent:
|
||||
if chunk:
|
||||
@@ -85,12 +81,6 @@ class FileDownloader(object):
|
||||
with click.progressbar(length=chunks, label=label) as pb:
|
||||
for _ in pb:
|
||||
f.write(next(itercontent))
|
||||
except IOError as e:
|
||||
click.secho(
|
||||
"Error: Please read http://bit.ly/package-manager-ioerror",
|
||||
fg="red",
|
||||
err=True)
|
||||
raise e
|
||||
finally:
|
||||
f.close()
|
||||
self._request.close()
|
||||
@@ -98,30 +88,32 @@ class FileDownloader(object):
|
||||
if self.get_lmtime():
|
||||
self._preserve_filemtime(self.get_lmtime())
|
||||
|
||||
return True
|
||||
|
||||
def verify(self, sha1=None):
|
||||
_dlsize = getsize(self._destination)
|
||||
if self.get_size() != -1 and _dlsize != self.get_size():
|
||||
raise FDSizeMismatch(_dlsize, self._fname, self.get_size())
|
||||
|
||||
if not sha1:
|
||||
return
|
||||
return None
|
||||
|
||||
dlsha1 = None
|
||||
try:
|
||||
result = util.exec_command(["sha1sum", self._destination])
|
||||
result = exec_command(["sha1sum", self._destination])
|
||||
dlsha1 = result['out']
|
||||
except (OSError, ValueError):
|
||||
try:
|
||||
result = util.exec_command(
|
||||
["shasum", "-a", "1", self._destination])
|
||||
result = exec_command(["shasum", "-a", "1", self._destination])
|
||||
dlsha1 = result['out']
|
||||
except (OSError, ValueError):
|
||||
pass
|
||||
|
||||
if dlsha1:
|
||||
dlsha1 = dlsha1[1:41] if dlsha1.startswith("\\") else dlsha1[:40]
|
||||
if sha1 != dlsha1:
|
||||
raise FDSHASumMismatch(dlsha1, self._fname, sha1)
|
||||
if not dlsha1:
|
||||
return None
|
||||
dlsha1 = dlsha1[1:41] if dlsha1.startswith("\\") else dlsha1[:40]
|
||||
if sha1.lower() != dlsha1.lower():
|
||||
raise FDSHASumMismatch(dlsha1, self._fname, sha1)
|
||||
return True
|
||||
|
||||
def _preserve_filemtime(self, lmdate):
|
||||
timedata = parsedate_tz(lmdate)
|
||||
|
||||
@@ -19,8 +19,10 @@ class PlatformioException(Exception):
|
||||
|
||||
def __str__(self): # pragma: no cover
|
||||
if self.MESSAGE:
|
||||
# pylint: disable=not-an-iterable
|
||||
return self.MESSAGE.format(*self.args)
|
||||
return Exception.__str__(self)
|
||||
|
||||
return super(PlatformioException, self).__str__()
|
||||
|
||||
|
||||
class ReturnErrorCode(PlatformioException):
|
||||
@@ -28,6 +30,10 @@ class ReturnErrorCode(PlatformioException):
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
class LockFileTimeoutError(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class MinitermException(PlatformioException):
|
||||
pass
|
||||
|
||||
@@ -36,11 +42,16 @@ class UserSideException(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class AbortedByUser(PlatformioException):
|
||||
class AbortedByUser(UserSideException):
|
||||
|
||||
MESSAGE = "Aborted by user"
|
||||
|
||||
|
||||
#
|
||||
# Development Platform
|
||||
#
|
||||
|
||||
|
||||
class UnknownPlatform(PlatformioException):
|
||||
|
||||
MESSAGE = "Unknown development platform '{0}'"
|
||||
@@ -57,13 +68,6 @@ class PlatformNotInstalledYet(PlatformioException):
|
||||
"Use `platformio platform install {0}` command")
|
||||
|
||||
|
||||
class BoardNotDefined(PlatformioException):
|
||||
|
||||
MESSAGE = (
|
||||
"You need to specify board ID using `-b` or `--board` option. "
|
||||
"Supported boards list is available via `platformio boards` command")
|
||||
|
||||
|
||||
class UnknownBoard(PlatformioException):
|
||||
|
||||
MESSAGE = "Unknown board ID '{0}'"
|
||||
@@ -79,47 +83,75 @@ class UnknownFramework(PlatformioException):
|
||||
MESSAGE = "Unknown framework '{0}'"
|
||||
|
||||
|
||||
class UnknownPackage(PlatformioException):
|
||||
# Package Manager
|
||||
|
||||
|
||||
class PlatformIOPackageException(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class UnknownPackage(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = "Detected unknown package '{0}'"
|
||||
|
||||
|
||||
class MissingPackageManifest(PlatformioException):
|
||||
class MissingPackageManifest(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = "Could not find one of '{0}' manifest files in the package"
|
||||
|
||||
|
||||
class UndefinedPackageVersion(PlatformioException):
|
||||
class UndefinedPackageVersion(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = ("Could not find a version that satisfies the requirement '{0}'"
|
||||
" for your system '{1}'")
|
||||
|
||||
|
||||
class PackageInstallError(PlatformioException):
|
||||
class PackageInstallError(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = ("Could not install '{0}' with version requirements '{1}' "
|
||||
"for your system '{2}'.\n\n"
|
||||
"Please try this solution -> http://bit.ly/faq-package-manager")
|
||||
|
||||
|
||||
class FDUnrecognizedStatusCode(PlatformioException):
|
||||
class ExtractArchiveItemError(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = (
|
||||
"Could not extract `{0}` to `{1}`. Try to disable antivirus "
|
||||
"tool or check this solution -> http://bit.ly/faq-package-manager")
|
||||
|
||||
|
||||
class UnsupportedArchiveType(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = "Can not unpack file '{0}'"
|
||||
|
||||
|
||||
class FDUnrecognizedStatusCode(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = "Got an unrecognized status code '{0}' when downloaded {1}"
|
||||
|
||||
|
||||
class FDSizeMismatch(PlatformioException):
|
||||
class FDSizeMismatch(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = ("The size ({0:d} bytes) of downloaded file '{1}' "
|
||||
"is not equal to remote size ({2:d} bytes)")
|
||||
|
||||
|
||||
class FDSHASumMismatch(PlatformioException):
|
||||
class FDSHASumMismatch(PlatformIOPackageException):
|
||||
|
||||
MESSAGE = ("The 'sha1' sum '{0}' of downloaded file '{1}' "
|
||||
"is not equal to remote '{2}'")
|
||||
|
||||
|
||||
class NotPlatformIOProject(PlatformioException):
|
||||
#
|
||||
# Project
|
||||
#
|
||||
|
||||
|
||||
class PlatformIOProjectException(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class NotPlatformIOProject(PlatformIOProjectException):
|
||||
|
||||
MESSAGE = (
|
||||
"Not a PlatformIO project. `platformio.ini` file has not been "
|
||||
@@ -127,26 +159,87 @@ class NotPlatformIOProject(PlatformioException):
|
||||
"please use `platformio init` command")
|
||||
|
||||
|
||||
class UndefinedEnvPlatform(PlatformioException):
|
||||
class InvalidProjectConf(PlatformIOProjectException):
|
||||
|
||||
MESSAGE = ("Invalid '{0}' (project configuration file): '{1}'")
|
||||
|
||||
|
||||
class UndefinedEnvPlatform(PlatformIOProjectException):
|
||||
|
||||
MESSAGE = "Please specify platform for '{0}' environment"
|
||||
|
||||
|
||||
class UnsupportedArchiveType(PlatformioException):
|
||||
|
||||
MESSAGE = "Can not unpack file '{0}'"
|
||||
|
||||
|
||||
class ProjectEnvsNotAvailable(PlatformioException):
|
||||
class ProjectEnvsNotAvailable(PlatformIOProjectException):
|
||||
|
||||
MESSAGE = "Please setup environments in `platformio.ini` file"
|
||||
|
||||
|
||||
class UnknownEnvNames(PlatformioException):
|
||||
class UnknownEnvNames(PlatformIOProjectException):
|
||||
|
||||
MESSAGE = "Unknown environment names '{0}'. Valid names are '{1}'"
|
||||
|
||||
|
||||
class ProjectOptionValueError(PlatformIOProjectException):
|
||||
|
||||
MESSAGE = "{0} for option `{1}` in section [{2}]"
|
||||
|
||||
|
||||
#
|
||||
# Library
|
||||
#
|
||||
|
||||
|
||||
class LibNotFound(PlatformioException):
|
||||
|
||||
MESSAGE = ("Library `{0}` has not been found in PlatformIO Registry.\n"
|
||||
"You can ignore this message, if `{0}` is a built-in library "
|
||||
"(included in framework, SDK). E.g., SPI, Wire, etc.")
|
||||
|
||||
|
||||
class NotGlobalLibDir(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"The `{0}` is not a PlatformIO project.\n\n"
|
||||
"To manage libraries in global storage `{1}`,\n"
|
||||
"please use `platformio lib --global {2}` or specify custom storage "
|
||||
"`platformio lib --storage-dir /path/to/storage/ {2}`.\n"
|
||||
"Check `platformio lib --help` for details.")
|
||||
|
||||
|
||||
class InvalidLibConfURL(PlatformioException):
|
||||
|
||||
MESSAGE = "Invalid library config URL '{0}'"
|
||||
|
||||
|
||||
#
|
||||
# UDEV Rules
|
||||
#
|
||||
|
||||
|
||||
class InvalidUdevRules(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class MissedUdevRules(InvalidUdevRules):
|
||||
|
||||
MESSAGE = (
|
||||
"Warning! Please install `99-platformio-udev.rules`. \nMode details: "
|
||||
"https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules")
|
||||
|
||||
|
||||
class OutdatedUdevRules(InvalidUdevRules):
|
||||
|
||||
MESSAGE = (
|
||||
"Warning! Your `{0}` are outdated. Please update or reinstall them."
|
||||
"\n Mode details: https://docs.platformio.org"
|
||||
"/en/latest/faq.html#platformio-udev-rules")
|
||||
|
||||
|
||||
#
|
||||
# Misc
|
||||
#
|
||||
|
||||
|
||||
class GetSerialPortsError(PlatformioException):
|
||||
|
||||
MESSAGE = "No implementation for your platform ('{0}') available"
|
||||
@@ -162,34 +255,12 @@ class APIRequestError(PlatformioException):
|
||||
MESSAGE = "[API] {0}"
|
||||
|
||||
|
||||
class InternetIsOffline(PlatformioException):
|
||||
class InternetIsOffline(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"You are not connected to the Internet.\n"
|
||||
"If you build a project first time, we need Internet connection "
|
||||
"to install all dependencies and toolchain.")
|
||||
|
||||
|
||||
class LibNotFound(PlatformioException):
|
||||
|
||||
MESSAGE = ("Library `{0}` has not been found in PlatformIO Registry.\n"
|
||||
"You can ignore this message, if `{0}` is a built-in library "
|
||||
"(included in framework, SDK). E.g., SPI, Wire, etc.")
|
||||
|
||||
|
||||
class NotGlobalLibDir(PlatformioException):
|
||||
|
||||
MESSAGE = (
|
||||
"The `{0}` is not a PlatformIO project.\n\n"
|
||||
"To manage libraries in global storage `{1}`,\n"
|
||||
"please use `platformio lib --global {2}` or specify custom storage "
|
||||
"`platformio lib --storage-dir /path/to/storage/ {2}`.\n"
|
||||
"Check `platformio lib --help` for details.")
|
||||
|
||||
|
||||
class InvalidLibConfURL(PlatformioException):
|
||||
|
||||
MESSAGE = "Invalid library config URL '{0}'"
|
||||
"to install all dependencies and toolchains.")
|
||||
|
||||
|
||||
class BuildScriptNotFound(PlatformioException):
|
||||
@@ -207,6 +278,11 @@ class InvalidSettingValue(PlatformioException):
|
||||
MESSAGE = "Invalid value '{0}' for the setting '{1}'"
|
||||
|
||||
|
||||
class InvalidJSONFile(PlatformioException):
|
||||
|
||||
MESSAGE = "Could not load broken JSON: {0}"
|
||||
|
||||
|
||||
class CIBuildEnvsEmpty(PlatformioException):
|
||||
|
||||
MESSAGE = ("Can't find PlatformIO build environments.\n"
|
||||
@@ -220,7 +296,7 @@ class UpgradeError(PlatformioException):
|
||||
|
||||
* Upgrade using `pip install -U platformio`
|
||||
* Try different installation/upgrading steps:
|
||||
http://docs.platformio.org/page/installation.html
|
||||
https://docs.platformio.org/page/installation.html
|
||||
"""
|
||||
|
||||
|
||||
@@ -242,11 +318,21 @@ class CygwinEnvDetected(PlatformioException):
|
||||
|
||||
class DebugSupportError(PlatformioException):
|
||||
|
||||
MESSAGE = ("Currently, PlatformIO does not support debugging for `{0}`.\n"
|
||||
"Please mail contact@pioplus.com or visit "
|
||||
"< http://docs.platformio.org/page/plus/debugging.html >")
|
||||
MESSAGE = (
|
||||
"Currently, PlatformIO does not support debugging for `{0}`.\n"
|
||||
"Please request support at https://github.com/platformio/"
|
||||
"platformio-core/issues \nor visit -> https://docs.platformio.org"
|
||||
"/page/plus/debugging.html")
|
||||
|
||||
|
||||
class DebugInvalidOptions(PlatformioException):
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class TestDirNotExists(PlatformioException):
|
||||
|
||||
MESSAGE = "A test folder '{0}' does not exist.\nPlease create 'test' "\
|
||||
"directory in project's root and put a test set.\n"\
|
||||
"More details about Unit "\
|
||||
"Testing: http://docs.platformio.org/page/plus/"\
|
||||
"unit-testing.html"
|
||||
|
||||
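These exception classes are plain message carriers: each one only overrides `MESSAGE` with numbered placeholders. The sketch below shows how such a message would be filled in at raise time; it is a minimal stand-in, not PlatformIO's actual base class (which is defined earlier in this file), and it assumes the base exception formats `MESSAGE` with the constructor arguments via `str.format`, as the `{0}`/`{1}` placeholders suggest.

```python
# Minimal sketch only -- not the real PlatformioException base class.
class PlatformioExceptionSketch(Exception):
    MESSAGE = None  # subclasses override this with a format string

    def __str__(self):
        if self.MESSAGE:
            # assumed behavior: interpolate constructor args into MESSAGE
            return self.MESSAGE.format(*self.args)
        return super(PlatformioExceptionSketch, self).__str__()


class UnknownEnvNamesSketch(PlatformioExceptionSketch):
    MESSAGE = "Unknown environment names '{0}'. Valid names are '{1}'"


print(str(UnknownEnvNamesSketch("release", "uno, native")))
# -> Unknown environment names 'release'. Valid names are 'uno, native'
```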
163  platformio/fs.py  Normal file
@@ -0,0 +1,163 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os
import re
import shutil
import stat
import sys
from glob import glob

import click

from platformio import exception
from platformio.compat import get_file_contents, glob_escape


class cd(object):

    def __init__(self, new_path):
        self.new_path = new_path
        self.prev_path = os.getcwd()

    def __enter__(self):
        os.chdir(self.new_path)

    def __exit__(self, etype, value, traceback):
        os.chdir(self.prev_path)


def get_source_dir():
    curpath = os.path.abspath(__file__)
    if not os.path.isfile(curpath):
        for p in sys.path:
            if os.path.isfile(os.path.join(p, __file__)):
                curpath = os.path.join(p, __file__)
                break
    return os.path.dirname(curpath)


def load_json(file_path):
    try:
        with open(file_path, "r") as f:
            return json.load(f)
    except ValueError:
        raise exception.InvalidJSONFile(file_path)


def format_filesize(filesize):
    base = 1024
    unit = 0
    suffix = "B"
    filesize = float(filesize)
    if filesize < base:
        return "%d%s" % (filesize, suffix)
    for i, suffix in enumerate("KMGTPEZY"):
        unit = base**(i + 2)
        if filesize >= unit:
            continue
        if filesize % (base**(i + 1)):
            return "%.2f%sB" % ((base * filesize / unit), suffix)
        break
    return "%d%sB" % ((base * filesize / unit), suffix)


def ensure_udev_rules():
    from platformio.util import get_systype

    def _rules_to_set(rules_path):
        return set(l.strip() for l in get_file_contents(rules_path).split("\n")
                   if l.strip() and not l.startswith("#"))

    if "linux" not in get_systype():
        return None
    installed_rules = [
        "/etc/udev/rules.d/99-platformio-udev.rules",
        "/lib/udev/rules.d/99-platformio-udev.rules"
    ]
    if not any(os.path.isfile(p) for p in installed_rules):
        raise exception.MissedUdevRules

    origin_path = os.path.abspath(
        os.path.join(get_source_dir(), "..", "scripts",
                     "99-platformio-udev.rules"))
    if not os.path.isfile(origin_path):
        return None

    origin_rules = _rules_to_set(origin_path)
    for rules_path in installed_rules:
        if not os.path.isfile(rules_path):
            continue
        current_rules = _rules_to_set(rules_path)
        if not origin_rules <= current_rules:
            raise exception.OutdatedUdevRules(rules_path)

    return True


def path_endswith_ext(path, extensions):
    if not isinstance(extensions, (list, tuple)):
        extensions = [extensions]
    for ext in extensions:
        if path.endswith("." + ext):
            return True
    return False


def match_src_files(src_dir, src_filter=None, src_exts=None):

    def _append_build_item(items, item, src_dir):
        if not src_exts or path_endswith_ext(item, src_exts):
            items.add(item.replace(src_dir + os.sep, ""))

    src_filter = src_filter or ""
    if isinstance(src_filter, (list, tuple)):
        src_filter = " ".join(src_filter)

    matches = set()
    # correct fs directory separator
    src_filter = src_filter.replace("/", os.sep).replace("\\", os.sep)
    for (action, pattern) in re.findall(r"(\+|\-)<([^>]+)>", src_filter):
        items = set()
        for item in glob(os.path.join(glob_escape(src_dir), pattern)):
            if os.path.isdir(item):
                for root, _, files in os.walk(item, followlinks=True):
                    for f in files:
                        _append_build_item(items, os.path.join(root, f),
                                           src_dir)
            else:
                _append_build_item(items, item, src_dir)
        if action == "+":
            matches |= items
        else:
            matches -= items
    return sorted(list(matches))


def rmtree(path):

    def _onerror(func, path, __):
        try:
            st_mode = os.stat(path).st_mode
            if st_mode & stat.S_IREAD:
                os.chmod(path, st_mode | stat.S_IWRITE)
            func(path)
        except Exception as e:  # pylint: disable=broad-except
            click.secho("%s \nPlease manually remove the file `%s`" %
                        (str(e), path),
                        fg="red",
                        err=True)

    return shutil.rmtree(path, onerror=_onerror)
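A short usage sketch for the new `platformio.fs` helpers above. It assumes the `platformio` package containing this module is importable; the temporary directory and file names are made up for illustration. The `+<pattern>`/`-<pattern>` pairs are exactly what the regex in `match_src_files()` parses: `+` adds matches to the set, `-` subtracts them.

```python
import os
import tempfile

from platformio.fs import format_filesize, match_src_files

# format_filesize() scales by powers of 1024
print(format_filesize(512))      # "512B"
print(format_filesize(1536))     # "1.50KB"
print(format_filesize(1048576))  # "1MB"

# build a throwaway source tree to filter
src_dir = tempfile.mkdtemp()
for name in ("main.c", "util.cpp", "notes.txt"):
    open(os.path.join(src_dir, name), "w").close()

# "+<*>" includes everything, "-<*.txt>" removes the text file again
print(match_src_files(src_dir, "+<*> -<*.txt>"))
# -> ['main.c', 'util.cpp']
```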
@@ -12,91 +12,118 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import codecs
import os
import re
import sys
from os.path import abspath, basename, expanduser, isdir, isfile, join, relpath

import bottle
from click.testing import CliRunner

from platformio import exception, util
from platformio.commands.run import cli as cmd_run
from platformio import fs, util
from platformio.compat import WINDOWS, get_file_contents
from platformio.proc import where_is_program
from platformio.project.config import ProjectConfig
from platformio.project.helpers import (get_project_lib_dir,
                                        get_project_libdeps_dir,
                                        get_project_src_dir,
                                        load_project_ide_data)


class ProjectGenerator(object):

    def __init__(self, project_dir, ide, env_name):
    def __init__(self, project_dir, ide, boards):
        self.config = ProjectConfig.get_instance(
            join(project_dir, "platformio.ini"))
        self.config.validate()
        self.project_dir = project_dir
        self.ide = ide
        self.env_name = env_name

        self._tplvars = {}
        self._gather_tplvars()
        self.ide = str(ide)
        self.env_name = str(self.get_best_envname(boards))

    @staticmethod
    def get_supported_ides():
        tpls_dir = join(util.get_source_dir(), "ide", "tpls")
        tpls_dir = join(fs.get_source_dir(), "ide", "tpls")
        return sorted(
            [d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))])

    @util.memoized
    def get_project_env(self):
        data = {}
        config = util.load_project_config(self.project_dir)
        for section in config.sections():
            if not section.startswith("env:"):
    def get_best_envname(self, boards=None):
        envname = None
        default_envs = self.config.default_envs()
        if default_envs:
            envname = default_envs[0]
            if not boards:
                return envname

        for env in self.config.envs():
            if not boards:
                return env
            if not envname:
                envname = env
            items = self.config.items(env=env, as_dict=True)
            if "board" in items and items.get("board") in boards:
                return env

        return envname

    def _load_tplvars(self):
        tpl_vars = {
            "config": self.config,
            "systype": util.get_systype(),
            "project_name": basename(self.project_dir),
            "project_dir": self.project_dir,
            "env_name": self.env_name,
            "user_home_dir": abspath(expanduser("~")),
            "platformio_path":
                sys.argv[0] if isfile(sys.argv[0])
                else where_is_program("platformio"),
            "env_path": os.getenv("PATH"),
            "env_pathsep": os.pathsep
        }  # yapf: disable

        # default env configuration
        tpl_vars.update(self.config.items(env=self.env_name, as_dict=True))
        # build data
        tpl_vars.update(
            load_project_ide_data(self.project_dir, self.env_name) or {})

        with fs.cd(self.project_dir):
            tpl_vars.update({
                "src_files": self.get_src_files(),
                "project_src_dir": get_project_src_dir(),
                "project_lib_dir": get_project_lib_dir(),
                "project_libdeps_dir": join(
                    get_project_libdeps_dir(), self.env_name)
            })  # yapf: disable

        for key, value in tpl_vars.items():
            if key.endswith(("_path", "_dir")):
                tpl_vars[key] = self.to_unix_path(value)
        for key in ("includes", "src_files", "libsource_dirs"):
            if key not in tpl_vars:
                continue
            if self.env_name != section[4:]:
                continue
            data = {"env_name": section[4:]}
            for k, v in config.items(section):
                data[k] = v
        return data
            tpl_vars[key] = [self.to_unix_path(inc) for inc in tpl_vars[key]]

    @util.memoized
    def get_project_build_data(self):
        data = {
            "defines": [],
            "includes": [],
            "cxx_path": None,
            "prog_path": None
        }
        envdata = self.get_project_env()
        if not envdata:
            return data
        tpl_vars['to_unix_path'] = self.to_unix_path
        return tpl_vars

        result = CliRunner().invoke(cmd_run, [
            "--project-dir", self.project_dir, "--environment",
            envdata['env_name'], "--target", "idedata"
        ])

        if result.exit_code != 0 and not isinstance(result.exception,
                                                    exception.ReturnErrorCode):
            raise result.exception
        if '"includes":' not in result.output:
            raise exception.PlatformioException(result.output)

        for line in result.output.split("\n"):
            line = line.strip()
            if line.startswith('{"') and line.endswith("}"):
                data = json.loads(line)
        return data

    def get_project_name(self):
        return basename(self.project_dir)
    @staticmethod
    def to_unix_path(path):
        if not WINDOWS or not path:
            return path
        return re.sub(r"[\\]+", "/", path)

    def get_src_files(self):
        result = []
        with util.cd(self.project_dir):
            for root, _, files in os.walk(util.get_projectsrc_dir()):
        with fs.cd(self.project_dir):
            for root, _, files in os.walk(get_project_src_dir()):
                for f in files:
                    result.append(relpath(join(root, f)))
        return result

    def get_tpls(self):
        tpls = []
        tpls_dir = join(util.get_source_dir(), "ide", "tpls", self.ide)
        tpls_dir = join(fs.get_source_dir(), "ide", "tpls", self.ide)
        for root, _, files in os.walk(tpls_dir):
            for f in files:
                if not f.endswith(".tpl"):
@@ -108,6 +135,7 @@ class ProjectGenerator(object):
        return tpls

    def generate(self):
        tpl_vars = self._load_tplvars()
        for tpl_relpath, tpl_path in self.get_tpls():
            dst_dir = self.project_dir
            if tpl_relpath:
@@ -116,57 +144,16 @@ class ProjectGenerator(object):
                os.makedirs(dst_dir)

            file_name = basename(tpl_path)[:-4]
            self._merge_contents(
                join(dst_dir, file_name),
                self._render_tpl(tpl_path).encode("utf8"))
            contents = self._render_tpl(tpl_path, tpl_vars)
            self._merge_contents(join(dst_dir, file_name), contents)

    def _render_tpl(self, tpl_path):
        content = ""
        with open(tpl_path) as f:
            content = f.read()
        return bottle.template(content, **self._tplvars)
    @staticmethod
    def _render_tpl(tpl_path, tpl_vars):
        return bottle.template(get_file_contents(tpl_path), **tpl_vars)

    @staticmethod
    def _merge_contents(dst_path, contents):
        file_name = basename(dst_path)

        # merge .gitignore
        if file_name == ".gitignore" and isfile(dst_path):
            modified = False
            default = [l.strip() for l in contents.split("\n")]
            with open(dst_path) as fp:
                current = [l.strip() for l in fp.readlines()]
            for d in default:
                if d and d not in current:
                    modified = True
                    current.append(d)
            if not modified:
                return
            contents = "\n".join(current) + "\n"

        with open(dst_path, "w") as f:
            f.write(contents)

    def _gather_tplvars(self):
        self._tplvars.update(self.get_project_env())
        self._tplvars.update(self.get_project_build_data())
        with util.cd(self.project_dir):
            self._tplvars.update({
                "project_name": self.get_project_name(),
                "src_files": self.get_src_files(),
                "user_home_dir": abspath(expanduser("~")),
                "project_dir": self.project_dir,
                "project_src_dir": util.get_projectsrc_dir(),
                "project_lib_dir": util.get_projectlib_dir(),
                "project_libdeps_dir": util.get_projectlibdeps_dir(),
                "systype": util.get_systype(),
                "platformio_path": self._fix_os_path(
                    util.where_is_program("platformio")),
                "env_pathsep": os.pathsep,
                "env_path": self._fix_os_path(os.getenv("PATH"))
            })  # yapf: disable

    @staticmethod
    def _fix_os_path(path):
        return (re.sub(r"[\\]+", '\\' * 4, path)
                if "windows" in util.get_systype() else path)
        if basename(dst_path) == ".gitignore" and isfile(dst_path):
            return
        with codecs.open(dst_path, "w", encoding="utf8") as fp:
            fp.write(contents)
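The reworked `ProjectGenerator` now takes a list of board IDs instead of a fixed environment name and resolves the environment itself through `get_best_envname()` (default_envs first, otherwise the first environment whose `board` option matches). A hedged invocation sketch, assuming the class lives at `platformio.ide.projectgenerator` and that `/path/to/project` holds a valid `platformio.ini`; the path and the `uno` board ID are placeholders:

```python
from platformio.ide.projectgenerator import ProjectGenerator

# pick the IDE template set and let the generator choose the environment
generator = ProjectGenerator("/path/to/project", "vscode", boards=["uno"])

print(generator.env_name)  # env selected by get_best_envname()
generator.generate()       # renders the IDE .tpl files into the project dir
```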
@@ -3,4 +3,4 @@
% end
% for define in defines:
-D{{!define}}
% end
% end

@@ -1,8 +1,9 @@
% _defines = " ".join(["-D%s" % d for d in defines])
{
    "execPath": "{{ cxx_path.replace("\\", "/") }}",
    "gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
    "gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
    "execPath": "{{ cxx_path }}",
    "gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
    "gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
    "gccErrorLimit": 15,
    "gccIncludePaths": "{{ ','.join(includes).replace("\\", "/") }}",
    "gccIncludePaths": "{{ ','.join(includes) }}",
    "gccSuppressWarnings": false
}

@@ -1,4 +1,3 @@
.pioenvs
.piolibdeps
.pio
.clang_complete
.gcc-flags.json

@@ -1,3 +1,2 @@
.pioenvs
.piolibdeps
.pio
CMakeListsPrivate.txt
4  platformio/ide/tpls/clion/.idea/misc.xml.tpl  generated
@@ -7,10 +7,10 @@
    </sourceRoots>
    <libraryRoots>
      <file path="$PROJECT_DIR$/lib" />
      <file path="$PROJECT_DIR$/.piolibdeps" />
      <file path="$PROJECT_DIR$/.pio/libdeps" />
    </libraryRoots>
    <excludeRoots>
      <file path="$PROJECT_DIR$/.pioenvs" />
      <file path="$PROJECT_DIR$/.pio" />
    </excludeRoots>
  </component>
</project>
@@ -15,7 +15,7 @@
        <FilterInfo>
          <option name="description" value="" />
          <option name="name" value="PIO Conf" />
          <option name="regExp" value="$FILE_PATH$:^platofrmio" />
          <option name="regExp" value="$FILE_PATH$:^platformio" />
        </FilterInfo>
      </array>
    </option>
@@ -1,14 +1,31 @@
# !!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE
# https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
#
# If you need to override existing CMake configuration or add extra,
# please create `CMakeListsUser.txt` in the root of project.
# The `CMakeListsUser.txt` will not be overwritten by PlatformIO.

cmake_minimum_required(VERSION 3.2)
project({{project_name}})

include(CMakeListsPrivate.txt)

if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/CMakeListsUser.txt)
include(CMakeListsUser.txt)
endif()

add_custom_target(
    PLATFORMIO_BUILD ALL
    COMMAND ${PLATFORMIO_CMD} -f -c clion run
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)

add_custom_target(
    PLATFORMIO_BUILD_VERBOSE ALL
    COMMAND ${PLATFORMIO_CMD} -f -c clion run --verbose
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)

add_custom_target(
    PLATFORMIO_UPLOAD ALL
    COMMAND ${PLATFORMIO_CMD} -f -c clion run --target upload
@@ -21,6 +38,12 @@ add_custom_target(
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)

add_custom_target(
    PLATFORMIO_MONITOR ALL
    COMMAND ${PLATFORMIO_CMD} -f -c clion device monitor
    WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)

add_custom_target(
    PLATFORMIO_TEST ALL
    COMMAND ${PLATFORMIO_CMD} -f -c clion test
@@ -1,27 +1,48 @@
set(ENV{PATH} "{{env_path}}")
set(PLATFORMIO_CMD "{{platformio_path}}")

SET(CMAKE_C_COMPILER "{{cc_path.replace("\\", "/")}}")
SET(CMAKE_CXX_COMPILER "{{cxx_path.replace("\\", "/")}}")
SET(CMAKE_CXX_FLAGS_DISTRIBUTION "{{cxx_flags}}")
SET(CMAKE_C_FLAGS_DISTRIBUTION "{{cc_flags}}")
set(CMAKE_CXX_STANDARD 11)
# !!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE
# https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags
#
# If you need to override existing CMake configuration or add extra,
# please create `CMakeListsUser.txt` in the root of project.
# The `CMakeListsUser.txt` will not be overwritten by PlatformIO.

% import re
%
% def _normalize_path(path):
% if project_dir in path:
% path = path.replace(project_dir, "${CMAKE_CURRENT_LIST_DIR}")
% elif user_home_dir in path:
% if "windows" in systype:
% path = path.replace(user_home_dir, "$ENV{HOMEDRIVE}$ENV{HOMEPATH}")
% else:
% path = path.replace(user_home_dir, "$ENV{HOME}")
% end
% end
% return path
% end

set(PLATFORMIO_CMD "{{ _normalize_path(platformio_path) }}")

SET(CMAKE_C_COMPILER "{{ _normalize_path(cc_path) }}")
SET(CMAKE_CXX_COMPILER "{{ _normalize_path(cxx_path) }}")
SET(CMAKE_CXX_FLAGS_DISTRIBUTION "{{cxx_flags}}")
SET(CMAKE_C_FLAGS_DISTRIBUTION "{{cc_flags}}")

% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
% if cc_stds:
SET(CMAKE_C_STANDARD {{ cc_stds[-1] }})
% end
% if cxx_stds:
set(CMAKE_CXX_STANDARD {{ cxx_stds[-1] }})
% end

% for define in defines:
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
% end

% for include in includes:
% if include.startswith(user_home_dir):
% if "windows" in systype:
include_directories("$ENV{HOMEDRIVE}$ENV{HOMEPATH}{{include.replace(user_home_dir, '').replace("\\", "/")}}")
% else:
include_directories("$ENV{HOME}{{include.replace(user_home_dir, '').replace("\\", "/")}}")
% end
% else:
include_directories("{{include.replace("\\", "/")}}")
% end
include_directories("{{ _normalize_path(include) }}")
% end

FILE(GLOB_RECURSE SRC_LIST "{{project_src_dir.replace("\\", "/")}}/*.*" "{{project_lib_dir.replace("\\", "/")}}/*.*" "{{project_libdeps_dir.replace("\\", "/")}}/*.*")
FILE(GLOB_RECURSE SRC_LIST "{{ _normalize_path(project_src_dir) }}/*.*" "{{ _normalize_path(project_lib_dir) }}/*.*" "{{ _normalize_path(project_libdeps_dir) }}/*.*")
@@ -53,12 +53,12 @@
            <Add option="-D{{define}}"/>
            % end
            % for include in includes:
            <Add directory="{{include.replace("\\", "/")}}"/>
            % end
            <Add directory="{{include}}"/>
            % end
        </Compiler>
        <Unit filename="platformio.ini" />
        % for file in src_files:
        <Unit filename="{{file.replace("\\", "/")}}"></Unit>
        <Unit filename="{{file}}"></Unit>
        % end
    </Project>
</CodeBlocks_project_file>
@@ -219,17 +219,17 @@
        <target name="PlatformIO: Upload using Programmer" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>run -t program</buildTarget>
            <buildTarget>run --target program</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>true</useDefaultCommand>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Upload SPIFFS image" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>run -t uploadfs</buildTarget>
            <buildTarget>run --target uploadfs</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>true</useDefaultCommand>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Build" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
@@ -237,23 +237,39 @@
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>run</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>true</useDefaultCommand>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Verbose Build" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>run --verbose</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Upload" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>run -t upload</buildTarget>
            <buildTarget>run --target upload</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>true</useDefaultCommand>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Verbose Upload" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>run --target upload --verbose</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Clean" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>run -t clean</buildTarget>
            <buildTarget>run --target clean</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>true</useDefaultCommand>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Test" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
@@ -261,15 +277,15 @@
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>test</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>true</useDefaultCommand>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Update platforms and libraries" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
        <target name="PlatformIO: Remote" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>update</buildTarget>
            <buildTarget>remote run --target upload</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>true</useDefaultCommand>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Rebuild C/C++ Project Index" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
@@ -277,7 +293,39 @@
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>init --ide eclipse</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>true</useDefaultCommand>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: List Devices" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>device list</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Update Project Libraries" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>lib update</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Update All" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>update</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
        <target name="PlatformIO: Upgrade Core" path="" targetID="org.eclipse.cdt.build.MakeTargetBuilder">
            <buildCommand>platformio</buildCommand>
            <buildArguments>-f -c eclipse</buildArguments>
            <buildTarget>upgrade</buildTarget>
            <stopOnError>true</stopOnError>
            <useDefaultCommand>false</useDefaultCommand>
            <runAllBuilders>false</runAllBuilders>
        </target>
    </buildTargets>
@@ -17,8 +17,8 @@
    <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
    <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_REGISTER_GROUPS" value=""/>
    <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
    <booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="true"/>
    <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
    <booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
    <stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value=""/>
    <stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="{{prog_path}}"/>
    <stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="{{project_name}}"/>
    <booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="false"/>

@@ -1,3 +1,16 @@
% import re
% STD_RE = re.compile(r"(\-std=[a-z\+]+\d+)")
% cxx_stds = STD_RE.findall(cxx_flags)
% cxx_std = cxx_stds[-1] if cxx_stds else ""
%
% if cxx_path.startswith(user_home_dir):
% if "windows" in systype:
% cxx_path = "${USERPROFILE}" + cxx_path.replace(user_home_dir, "")
% else:
% cxx_path = "${HOME}" + cxx_path.replace(user_home_dir, "")
% end
% end
%
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project>
    <configuration id="0.910961921" name="Default">
@@ -5,11 +18,7 @@
        <provider copy-of="extension" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider"/>
        <provider-reference id="org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider" ref="shared-provider"/>
        <provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
        % if "windows" in systype:
        <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
        % else:
        <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
        % end
        <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="{{ cxx_path }} ${FLAGS} {{ cxx_std }} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
            <language-scope id="org.eclipse.cdt.core.gcc"/>
            <language-scope id="org.eclipse.cdt.core.g++"/>
        </provider>
@@ -20,11 +29,7 @@
        <provider copy-of="extension" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider"/>
        <provider-reference id="org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider" ref="shared-provider"/>
        <provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
        % if "windows" in systype:
        <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
        % else:
        <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
        % end
        <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="{{ cxx_path }} ${FLAGS} {{ cxx_std }} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
            <language-scope id="org.eclipse.cdt.core.gcc"/>
            <language-scope id="org.eclipse.cdt.core.g++"/>
        </provider>
@@ -1,11 +1,11 @@
eclipse.preferences.version=1
environment/project/0.910961921/PATH/delimiter={{env_pathsep.replace(":", "\\:")}}
environment/project/0.910961921/PATH/operation=replace
environment/project/0.910961921/PATH/value={{env_path.replace(":", "\\:")}}
environment/project/0.910961921/PATH/value={{env_path.replace(":", "\\:")}}${PathDelimiter}${PATH}
environment/project/0.910961921/append=true
environment/project/0.910961921/appendContributed=true
environment/project/0.910961921.1363900502/PATH/delimiter={{env_pathsep.replace(":", "\\:")}}
environment/project/0.910961921.1363900502/PATH/operation=replace
environment/project/0.910961921.1363900502/PATH/value={{env_path.replace(":", "\\:")}}
environment/project/0.910961921.1363900502/PATH/value={{env_path.replace(":", "\\:")}}${PathDelimiter}${PATH}
environment/project/0.910961921.1363900502/append=true
environment/project/0.910961921.1363900502/appendContributed=true
@@ -3,4 +3,4 @@
% end
% for define in defines:
-D{{!define}}
% end
% end

@@ -1,3 +1,2 @@
.pioenvs
.piolibdeps
.pio
.clang_complete
@@ -11,7 +11,7 @@
        <itemPath>nbproject/private/launcher.properties</itemPath>
      </logicalFolder>
    </logicalFolder>
    <sourceFolderFilter>^(nbproject|.pioenvs)$</sourceFolderFilter>
    <sourceFolderFilter>^(nbproject|.pio)$</sourceFolderFilter>
    <sourceRootList>
      <Elem>.</Elem>
    </sourceRootList>
@@ -4,7 +4,7 @@
        {
            "cmd":
            [
                "platformio",
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "run"
            ],
@@ -14,7 +14,7 @@
        {
            "cmd":
            [
                "platformio",
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "run"
            ],
@@ -23,27 +23,7 @@
        {
            "cmd":
            [
                "platformio",
                "-f", "-c", "sublimetext",
                "run",
                "--target",
                "clean"
            ],
            "name": "Clean"
        },
        {
            "cmd":
            [
                "platformio",
                "-f", "-c", "sublimetext",
                "test"
            ],
            "name": "Test"
        },
        {
            "cmd":
            [
                "platformio",
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "run",
                "--target",
@@ -54,7 +34,27 @@
        {
            "cmd":
            [
                "platformio",
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "run",
                "--target",
                "clean"
            ],
            "name": "Clean"
        },
        {
            "cmd":
            [
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "test"
            ],
            "name": "Test"
        },
        {
            "cmd":
            [
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "run",
                "--target",
@@ -65,7 +65,7 @@
        {
            "cmd":
            [
                "platformio",
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "run",
                "--target",
@@ -76,16 +76,24 @@
        {
            "cmd":
            [
                "platformio",
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "update"
            ],
            "name": "Update platforms and libraries"
        },
        {
            "cmd":
            [
                "{{ platformio_path }}",
                "-f", "-c", "sublimetext",
                "upgrade"
            ],
            "name": "Upgrade PlatformIO Core"
        }
    ],
    "working_dir": "${project_path:${folder}}",
    "selector": "source.c, source.c++",
    "path": "{{env_path}}"
    "selector": "source.c, source.c++"
}
],
"folders":
@@ -97,8 +105,8 @@
    "settings":
    {
        "sublimegdb_workingdir": "{{project_dir}}",
        "sublimegdb_exec_cmd": "-exec-continue",
        "sublimegdb_commandline": "{{platformio_path}} -f -c sublimetext debug --interface=gdb --interpreter=mi -x .pioinit"
        "sublimegdb_exec_cmd": "",
        "sublimegdb_commandline": "{{ platformio_path }} -f -c sublimetext debug --interface=gdb --interpreter=mi -x .pioinit"

    }
}
@@ -1,6 +1,6 @@
% for include in includes:
-I{{include}}
-I"{{include}}"
% end
% for define in defines:
-D{{!define}}
% end
% end

@@ -1,8 +1,9 @@
% _defines = " ".join(["-D%s" % d for d in defines])
{
    "execPath": "{{ cxx_path.replace("\\", "/") }}",
    "gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
    "gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
    "execPath": "{{ cxx_path }}",
    "gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
    "gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }} {{ !_defines.replace('"', '\\"') }}",
    "gccErrorLimit": 15,
    "gccIncludePaths": "{{ ','.join(includes).replace("\\", "/") }}",
    "gccIncludePaths": "{{! ','.join("'{}'".format(inc) for inc in includes)}}",
    "gccSuppressWarnings": false
}

@@ -1,4 +1,3 @@
.pioenvs
.piolibdeps
.pio
.clang_complete
.gcc-flags.json

@@ -1,5 +1,5 @@
.pioenvs
.piolibdeps
.pio
.vscode/.browse.c_cpp.db*
.vscode/c_cpp_properties.json
.vscode/launch.json
.vscode/ipch
@@ -1,38 +1,71 @@
{
    "configurations": [
        {
            "name": "!!! WARNING !!! AUTO-GENERATED FILE, PLEASE DO NOT MODIFY IT AND USE https://docs.platformio.org/page/projectconf/section_env_build.html#build-flags"
        },
        {
% import platform
% from os.path import commonprefix, dirname, isdir
%
% systype = platform.system().lower()
%
% def _escape(text):
% return to_unix_path(text).replace('"', '\\"')
% end
%
% cleaned_includes = []
% for include in includes:
% if "toolchain-" not in dirname(commonprefix([include, cc_path])) and isdir(include):
% cleaned_includes.append(include)
% end
% end
%
% if systype == "windows":
            "name": "Win32",
% elif systype == "darwin":
            "name": "Mac",
            "macFrameworkPath": [],
% else:
            "name": "Linux",
% end
            "includePath": [
% for include in includes:
                "{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
% for include in cleaned_includes:
                "{{ include }}",
% end
                ""
            ],
            "browse": {
                "limitSymbolsToIncludedHeaders": true,
                "databaseFilename": "${workspaceRoot}/.vscode/.browse.c_cpp.db",
                "path": [
% for include in includes:
                    "{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
% for include in cleaned_includes:
                    "{{ include }}",
% end
                    ""
                ]
            },
            "defines": [
% for define in defines:
                "{{!define.replace('"', '\\"')}}",
                "{{! _escape(define) }}",
% end
                ""
            ],
            "intelliSenseMode": "clang-x64"
            "intelliSenseMode": "clang-x64",
% import re
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
% cc_stds = STD_RE.findall(cc_flags)
% cxx_stds = STD_RE.findall(cxx_flags)
%
% # pass only architecture specific flags
% cc_m_flags = " ".join([f.strip() for f in cc_flags.split(" ") if f.strip().startswith("-m")])
%
% if cc_stds:
            "cStandard": "c{{ cc_stds[-1] }}",
% end
% if cxx_stds:
            "cppStandard": "c++{{ cxx_stds[-1] }}",
% end
            "compilerPath": "\"{{cc_path}}\" {{! _escape(cc_m_flags) }}"
        }
    ]
    ],
    "version": 4
}
7  platformio/ide/tpls/vscode/.vscode/extensions.json.tpl  vendored  Normal file
@@ -0,0 +1,7 @@
{
    // See http://go.microsoft.com/fwlink/?LinkId=827846
    // for the documentation about the extensions.json format
    "recommendations": [
        "platformio.platformio-ide"
    ]
}
@@ -1,16 +1,43 @@
// AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY

// PIO Unified Debugger
//
// Documentation: https://docs.platformio.org/page/plus/debugging.html
// Configuration: https://docs.platformio.org/page/projectconf/section_env_debug.html

% from os.path import dirname, join
%
% def _escape_path(path):
% return path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')
% end
%
{
    "version": "0.2.0",
    "configurations": [
        {
            "type": "gdb",
            "type": "platformio-debug",
            "request": "launch",
            "cwd": "${workspaceRoot}",
            "name": "PlatformIO Debugger",
            "target": "{{prog_path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
            "gdbpath": "{{join(dirname(platformio_path), "piodebuggdb").replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
            "autorun": [ "source .pioinit" ],
            "preLaunchTask": "PlatformIO: Pre-Debug",
            "name": "PIO Debug",
            "executable": "{{ _escape_path(prog_path) }}",
            "toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
            % if svd_path:
            "svdPath": "{{ _escape_path(svd_path) }}",
            % end
            "preLaunchTask": {
                "type": "PlatformIO",
                "task": "Pre-Debug"
            },
            "internalConsoleOptions": "openOnSessionStart"
        },
        {
            "type": "platformio-debug",
            "request": "launch",
            "name": "PIO Debug (skip Pre-Debug)",
            "executable": "{{ _escape_path(prog_path) }}",
            "toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
            % if svd_path:
            "svdPath": "{{ _escape_path(svd_path) }}",
            % end
            "internalConsoleOptions": "openOnSessionStart"
        }
    ]
108  platformio/lockfile.py  Normal file
@@ -0,0 +1,108 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from os import remove
from os.path import abspath, exists, getmtime
from time import sleep, time

from platformio import exception

LOCKFILE_TIMEOUT = 3600  # in seconds, 1 hour
LOCKFILE_DELAY = 0.2

LOCKFILE_INTERFACE_FCNTL = 1
LOCKFILE_INTERFACE_MSVCRT = 2

try:
    import fcntl
    LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_FCNTL
except ImportError:
    try:
        import msvcrt
        LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_MSVCRT
    except ImportError:
        LOCKFILE_CURRENT_INTERFACE = None


class LockFileExists(Exception):
    pass


class LockFile(object):

    def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
        self.timeout = timeout
        self.delay = delay
        self._lock_path = abspath(path) + ".lock"
        self._fp = None

    def _lock(self):
        if not LOCKFILE_CURRENT_INTERFACE and exists(self._lock_path):
            # remove stale lock
            if time() - getmtime(self._lock_path) > 10:
                try:
                    remove(self._lock_path)
                except:  # pylint: disable=bare-except
                    pass
            else:
                raise LockFileExists

        self._fp = open(self._lock_path, "w")
        try:
            if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
                fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
            elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
                msvcrt.locking(self._fp.fileno(), msvcrt.LK_NBLCK, 1)
        except IOError:
            self._fp = None
            raise LockFileExists
        return True

    def _unlock(self):
        if not self._fp:
            return
        if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
            fcntl.flock(self._fp.fileno(), fcntl.LOCK_UN)
        elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
            msvcrt.locking(self._fp.fileno(), msvcrt.LK_UNLCK, 1)
        self._fp.close()
        self._fp = None

    def acquire(self):
        elapsed = 0
        while elapsed < self.timeout:
            try:
                return self._lock()
            except LockFileExists:
                sleep(self.delay)
                elapsed += self.delay

        raise exception.LockFileTimeoutError()

    def release(self):
        self._unlock()
        if exists(self._lock_path):
            try:
                remove(self._lock_path)
            except:  # pylint: disable=bare-except
                pass

    def __enter__(self):
        self.acquire()

    def __exit__(self, type_, value, traceback):
        self.release()

    def __del__(self):
        self.release()
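A usage sketch for the new `LockFile` helper above. The guarded path is a placeholder; note that when neither `fcntl` nor `msvcrt` is available the class falls back to the lock file's existence alone and silently removes locks older than ten seconds as stale.

```python
import os
import tempfile

from platformio.lockfile import LockFile

resource = os.path.join(tempfile.gettempdir(), "demo-resource.json")

# __enter__ calls acquire() but returns None, so no "as" target is bound.
with LockFile(resource, timeout=10, delay=0.1):
    # Only one process at a time gets past this point; a competing process
    # keeps retrying in acquire() and, once the timeout elapses, raises the
    # LockFileTimeoutError referenced above (from platformio.exception).
    with open(resource, "w") as fp:
        fp.write("{}")
```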
Some files were not shown because too many files have changed in this diff.