Mirror of https://github.com/platformio/platformio-core.git, synced 2025-12-23 15:18:03 +01:00

Compare commits (970 commits)
This comparison lists 970 commits, from 70eedfbeec through a1ed5c6262. Only the SHA1 column survived in this mirror listing; the author, date, and commit-message cells are empty for every row.
    @@ -1,4 +1,4 @@
    # Copyright 2014-present PlatformIO <contact@platformio.org>
    # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
.github/ISSUE_TEMPLATE.md (vendored): 26 lines changed

    @@ -1,20 +1,28 @@
    What kind of issue is this?

    - [ ] Question. This issue tracker is not the place for questions. If you want to ask how to do
    something, or to understand why something isn't working the way you expect it to, use
    our Community Forums https://community.platformio.org
    - [ ] **Question**.
    This issue tracker is not the place for questions. If you want to ask how to do something,
    or to understand why something isn't working the way you expect it to,
    use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)

    - [ ] PlatformIO IDE. All issues related to PlatformIO IDE should be reported to appropriate repository
    https://github.com/platformio/platformio-atom-ide/issues
    - [ ] **PlatformIO IDE**.
    All issues related to PlatformIO IDE should be reported to appropriate repository:
    [PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
    [PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)

    - [ ] Development Platform. All issues related to Development Platform should be reported to appropriate repository. Search it using link below
    https://github.com/platformio?query=platform-
    - [ ] **Development Platform or Board**.
    All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
    should be reported to appropriate repository related to your hardware
    https://github.com/topics/platformio-platform

    - [ ] Feature Request. Start by telling us what problem you’re trying to solve. Often a solution
    - [ ] **Feature Request**.
    Start by telling us what problem you’re trying to solve. Often a solution
    already exists! Don’t send pull requests to implement new features without first getting our
    support. Sometimes we leave features out on purpose to keep the project small.

    - [ ] PlatformIO Core. If you’ve found a bug, please provide an information below.
    - [ ] **PlatformIO Core**.
    If you’ve found a bug, please provide an information below.

    *You can erase any parts of this template not applicable to your Issue.*
.gitignore (vendored): 1 line changed

    @@ -9,3 +9,4 @@ build
    coverage.xml
    .coverage
    htmlcov
    .pytest_cache
    @@ -1,3 +1,3 @@
    [settings]
    line_length=79
    known_third_party=bottle,click,lockfile,pytest,requests,semantic_version,serial,SCons
    known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial
    @@ -16,11 +16,12 @@ matrix:
    env: TOX_ENV=py27
    - os: osx
    language: generic
    env: TOX_ENV=py27
    env: TOX_ENV=skipexamples

    install:
    - git submodule update --init --recursive
    - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install -U tox; else pip install -U tox; fi
    - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
    - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install "tox==3.0.0"; else pip install -U tox; fi

    # ChipKIT issue: install 32-bit support for GCC PIC32
    - if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi
    @@ -28,10 +29,6 @@ install:
    script:
    - tox -e $TOX_ENV

    after_success:
    - if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then tox -e coverage; fi
    - if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then bash <(curl -s https://codecov.io/bash); fi

    notifications:
    email: false
.vscode/settings.json (vendored, new file): 15 lines

    @@ -0,0 +1,15 @@
    {
    "python.pythonPath": "${workspaceRoot}/.tox/develop/bin/python",
    "python.formatting.provider": "yapf",
    "files.exclude": {
    "**/*.pyc": true,
    "*.egg-info": true,
    ".cache": true,
    "build": true,
    "dist": true
    },
    "editor.rulers": [79],
    "restructuredtext.builtDocumentationPath": "${workspaceRoot}/docs/_build/html",
    "restructuredtext.confPath": "${workspaceRoot}/docs",
    "restructuredtext.linter.executablePath": "${workspaceRoot}/.tox/docs/bin/restructuredtext-lint"
    }
    @@ -1,7 +1,7 @@
    Contributing
    ------------

    To get started, <a href="https://www.clahub.com/agreements/platformio/platformio">sign the Contributor License Agreement</a>.
    To get started, <a href="https://www.clahub.com/agreements/platformio/platformio-core">sign the Contributor License Agreement</a>.

    1. Fork the repository on GitHub.
    2. Make a branch off of ``develop``
HISTORY.rst: 890 lines changed (file diff suppressed because it is too large)
Makefile: 5 lines changed

    @@ -9,7 +9,10 @@ isort:
    yapf:
    yapf --recursive --in-place platformio/

    before-commit: isort yapf lint
    test:
    py.test -v -s -n 3 --dist=loadscope tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py

    before-commit: isort yapf lint test

    clean-docs:
    rm -rf docs/_build
README.rst: 233 lines changed
@@ -7,9 +7,6 @@ PlatformIO
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
|
||||
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
|
||||
:alt: AppVeyor.CI Build Status
|
||||
.. image:: https://requires.io/github/platformio/platformio/requirements.svg?branch=develop
|
||||
:target: https://requires.io/github/platformio/platformio/requirements/?branch=develop
|
||||
:alt: Requirements Status
|
||||
.. image:: https://img.shields.io/pypi/v/platformio.svg
|
||||
:target: https://pypi.python.org/pypi/platformio/
|
||||
:alt: Latest Version
|
||||
@@ -18,171 +15,109 @@ PlatformIO
|
||||
:alt: License
|
||||
.. image:: https://img.shields.io/PlatformIO/Community.png
|
||||
:alt: Community Forums
|
||||
:target: https://community.platformio.org
|
||||
.. image:: https://img.shields.io/PlatformIO/Plus.png?color=orange
|
||||
:alt: PlatformIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
|
||||
:target: https://pioplus.com
|
||||
:target: https://community.platformio.org?utm_source=github&utm_medium=core
|
||||
.. image:: https://img.shields.io/PIO/Plus.png?color=orange
|
||||
:alt: PIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
|
||||
:target: https://platformio.org/pricing?utm_source=github&utm_medium=core
|
||||
|
||||
**Quick Links:** `Home Page <http://platformio.org>`_ |
|
||||
`PlatformIO Plus <https://pioplus.com>`_ |
|
||||
`PlatformIO IDE <http://platformio.org/platformio-ide>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/tree/develop>`_ |
|
||||
`Docs <http://docs.platformio.org>`_ |
|
||||
`Donate <http://platformio.org/donate>`_ |
|
||||
`Contact Us <https://pioplus.com/contact.html>`_
|
||||
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
|
||||
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/>`_ |
|
||||
`Docs <http://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
|
||||
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
|
||||
|
||||
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
|
||||
`Facebook <https://www.facebook.com/platformio>`_ |
|
||||
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
|
||||
`Bintray <https://bintray.com/platformio>`_ |
|
||||
`Community <https://community.platformio.org>`_
|
||||
`Community <https://community.platformio.org?utm_source=github&utm_medium=core>`_
|
||||
|
||||
.. image:: http://docs.platformio.org/en/stable/_static/platformio-logo.png
|
||||
:target: http://platformio.org
|
||||
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
|
||||
:target: https://platformio.org?utm_source=github&utm_medium=core
|
||||
|
||||
`PlatformIO <http://platformio.org>`_ is an open source ecosystem for IoT
|
||||
development. Cross-platform build system and library manager. Continuous and
|
||||
IDE integration. Arduino, ESP8266 and ARM mbed compatible
|
||||
`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ is an open source ecosystem for IoT
|
||||
development. Cross-platform IDE and unified debugger. Remote unit testing and
|
||||
firmware updates.
|
||||
|
||||
* **PlatformIO IDE** - The next-generation integrated development environment for IoT.
|
||||
C/C++ Intelligent Code Completion and Smart Code Linter for the super-fast coding.
|
||||
Multi-projects workflow with Multiple Panes. Themes Support with dark and light colors.
|
||||
Built-in Terminal with PlatformIO Core tool and support for the powerful Serial Port Monitor.
|
||||
All advanced instruments without leaving your favourite development environment.
|
||||
* **Development Platforms** - Embedded and Desktop development platforms with
|
||||
pre-built toolchains, debuggers, uploaders and frameworks which work under
|
||||
popular host OS: Mac, Windows, Linux (+ARM)
|
||||
* **Embedded Boards** - Rapid Embedded Programming, IDE and Continuous
|
||||
Integration in a few steps with PlatformIO thanks to built-in project
|
||||
generator for the most popular embedded boards and IDE
|
||||
* **Library Manager** - Hundreds Popular Libraries are organized into single
|
||||
Web 2.0 platform: list by categories, keywords, authors, compatible
|
||||
platforms and frameworks; learn via examples; be up-to-date with the latest
|
||||
version.
|
||||
Get Started
|
||||
-----------
|
||||
|
||||
*Atmel AVR & SAM, Espressif, Freescale Kinetis, Intel ARC32, Lattice iCE40,
|
||||
Microchip PIC32, Nordic nRF51, NXP LPC, Silicon Labs EFM32, ST STM32,
|
||||
TI MSP430 & Tiva, Teensy, Arduino, mbed, libOpenCM3, etc.*
|
||||
* `What is PlatformIO? <http://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
.. image:: http://docs.platformio.org/en/stable/_static/platformio-demo-wiring.gif
|
||||
:target: http://platformio.org
|
||||
Open Source
|
||||
-----------
|
||||
|
||||
* `PlatformIO Plus and professional solutions <https://pioplus.com>`_
|
||||
* `PlatformIO IDE <http://platformio.org/platformio-ide>`_
|
||||
* `Get Started <http://platformio.org/get-started>`_
|
||||
* `Library Search and Registry <http://platformio.org/lib>`_
|
||||
* `Development Platforms <http://platformio.org/platforms>`_
|
||||
* `Frameworks <http://platformio.org/frameworks>`_
|
||||
* `Embedded Boards Explorer <http://platformio.org/boards>`_
|
||||
* `Library Manager <http://docs.platformio.org/en/stable/librarymanager/index.html>`_
|
||||
* `User Guide <http://docs.platformio.org/en/stable/userguide/index.html>`_
|
||||
* `Continuous Integration <http://docs.platformio.org/en/stable/ci/index.html>`_
|
||||
* `IDE Integration <http://docs.platformio.org/en/stable/ide.html>`_
|
||||
* `Articles about us <http://docs.platformio.org/en/stable/articles.html>`_
|
||||
* `FAQ <http://docs.platformio.org/en/stable/faq.html>`_
|
||||
* `Release Notes <http://docs.platformio.org/en/stable/history.html>`_
|
||||
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
|
||||
* `PlatformIO Core (CLI) <http://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
|
||||
* `Library Management <http://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`_
|
||||
* `Desktop IDEs Integration <http://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
|
||||
* `Continuous Integration <http://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Advanced Scripting API <http://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Use whenever. *Run everywhere.*
|
||||
-------------------------------
|
||||
*PlatformIO* is written in pure *Python* and **doesn't depend** on any
|
||||
additional libraries/tools from an operating system. It allows you to use
|
||||
*PlatformIO* beginning from *PC (Mac, Linux, Win)* and ending with credit-card
|
||||
sized computers (`Raspberry Pi <http://www.raspberrypi.org>`_,
|
||||
`BeagleBone <http://beagleboard.org>`_,
|
||||
`CubieBoard <http://cubieboard.org>`_).
|
||||
PIO Plus
|
||||
--------
|
||||
|
||||
Embedded Development. *Easier Than Ever.*
|
||||
-----------------------------------------
|
||||
*PlatformIO* is well suited for embedded development and has pre-configured
|
||||
settings for most popular `Embedded Boards <http://platformio.org/boards>`_.
|
||||
* `PIO Remote <http://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unified Debugger <http://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unit Testing <http://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
|
||||
* `Cloud IDEs Integration <http://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#solution-pio-delivery>`_
|
||||
* `Integration Services <https://platformio.org/pricing?utm_source=github&utm_medium=core#enterprise-features>`_
|
||||
|
||||
* Colourful `command-line output <https://raw.githubusercontent.com/platformio/platformio/develop/examples/platformio-examples.png>`_
|
||||
* `IDE Integration <http://docs.platformio.org/en/stable/ide.html>`_ with
|
||||
*Arduino, Atom, Eclipse, Emacs, Energia, Qt Creator, Sublime Text, Vim, Visual Studio*
|
||||
* Cloud compiling and `Continuous Integration <http://docs.platformio.org/en/stable/ci/index.html>`_
|
||||
with *AppVeyor, Circle CI, Drone, Shippable, Travis CI*
|
||||
* Built-in `Serial Port Monitor <http://docs.platformio.org/en/stable/userguide/cmd_serialports.html#platformio-serialports-monitor>`_ and configurable
|
||||
`build -flags/-options <http://docs.platformio.org/en/stable/projectconf.html#build-flags>`_
|
||||
* Automatic **firmware uploading**
|
||||
* Pre-built tool chains, frameworks for the popular `Hardware Platforms <http://platformio.org/platforms>`_
|
||||
Registry
|
||||
--------
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-embedded-development.png
|
||||
:target: http://platformio.org
|
||||
:alt: PlatformIO Embedded Development Process
|
||||
* `Libraries <https://platformio.org/lib?utm_source=github&utm_medium=core>`_
|
||||
* `Development Platforms <https://platformio.org/platforms?utm_source=github&utm_medium=core>`_
|
||||
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
|
||||
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_
|
||||
|
||||
The Missing Library Manager. *It's here!*
|
||||
-----------------------------------------
|
||||
*PlatformIO Library Manager* is the missing library manager for development
|
||||
platforms which allows you to organize and have up-to-date external libraries.
|
||||
Development Platforms
|
||||
---------------------
|
||||
|
||||
* Friendly `Command-Line Interface <http://docs.platformio.org/en/stable/librarymanager/index.html>`_
|
||||
* Modern `Web 2.0 Library Search <http://platformio.org/lib>`_
|
||||
* Open Source `Library Registry API <https://github.com/platformio/platformio-api>`_
|
||||
* Library Crawler based on `library.json <http://docs.platformio.org/en/stable/librarymanager/config.html>`_
|
||||
specification
|
||||
* Library **dependency management**
|
||||
* Automatic library updating
|
||||
* `Atmel AVR <https://platformio.org/platforms/atmelavr?utm_source=github&utm_medium=core>`_
|
||||
* `Atmel SAM <https://platformio.org/platforms/atmelsam?utm_source=github&utm_medium=core>`_
|
||||
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
|
||||
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
|
||||
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
|
||||
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
|
||||
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
|
||||
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
|
||||
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
|
||||
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
|
||||
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
|
||||
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
|
||||
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
|
||||
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
|
||||
* `Samsung ARTIK <https://platformio.org/platforms/samsung_artik?utm_source=github&utm_medium=core>`_
|
||||
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
|
||||
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
|
||||
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
|
||||
* `TI MSP430 <https://platformio.org/platforms/timsp430?utm_source=github&utm_medium=core>`_
|
||||
* `TI Tiva <https://platformio.org/platforms/titiva?utm_source=github&utm_medium=core>`_
|
||||
* `WIZNet W7500 <https://platformio.org/platforms/wiznet7500?utm_source=github&utm_medium=core>`_
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-library-manager.png
|
||||
:target: http://platformio.org
|
||||
:alt: PlatformIO Library Manager Architecture
|
||||
Frameworks
|
||||
----------
|
||||
|
||||
Smart Build System. *Fast and Reliable.*
|
||||
----------------------------------------
|
||||
*PlatformIO Code Builder* is built-on a next-generation software construction
|
||||
tool named `SCons <http://www.scons.org/>`_. Think of *SCons* as an improved,
|
||||
cross-platform substitute for the classic *Make* utility.
|
||||
|
||||
* Reliable, automatic *dependency analysis*
|
||||
* Reliable detection of *build changes*
|
||||
* Improved support for *parallel builds*
|
||||
* Ability to share *built files in a cache*
|
||||
* Lookup for external libraries which are installed via `Library Manager <http://docs.platformio.org/en/stable/librarymanager/index.html>`_
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-scons-builder.png
|
||||
:target: http://platformio.org
|
||||
:alt: PlatformIO Build System Architecture
|
||||
|
||||
Single source code. *Multiple platforms.*
|
||||
-----------------------------------------
|
||||
*PlatformIO* allows the developer to compile the same code with different
|
||||
development platforms using only *One Command*
|
||||
`platformio run <http://docs.platformio.org/en/stable/userguide/cmd_run.html>`_.
|
||||
This happens due to
|
||||
`Project Configuration File (platformio.ini) <http://docs.platformio.org/en/stable/projectconf.html>`_
|
||||
where you can setup different environments with specific options (platform
|
||||
type, firmware uploading settings, pre-built framework, build flags and many
|
||||
more).
|
||||
|
||||
It has support for the most popular embedded platforms:
|
||||
|
||||
* `Atmel AVR <http://platformio.org/platforms/atmelavr>`_
|
||||
* `Atmel SAM <http://platformio.org/platforms/atmelsam>`_
|
||||
* `Espressif <http://platformio.org/platforms/espressif>`_
|
||||
* `Freescale Kinetis <http://platformio.org/platforms/freescalekinetis>`_
|
||||
* `Intel ARC32 <http://platformio.org/platforms/intel_arc32>`_
|
||||
* `Lattice iCE40 <http://platformio.org/platforms/lattice_ice40>`_
|
||||
* `Microchip PIC32 <http://platformio.org/platforms/microchippic32>`_
|
||||
* `Nordic nRF51 <http://platformio.org/platforms/nordicnrf51>`_
|
||||
* `NXP LPC <http://platformio.org/platforms/nxplpc>`_
|
||||
* `ST STM32 <http://platformio.org/platforms/ststm32>`_
|
||||
* `Silicon Labs EFM32 <http://platformio.org/platforms/siliconlabsefm32>`_
|
||||
* `Teensy <http://platformio.org/platforms/teensy>`_
|
||||
* `TI MSP430 <http://platformio.org/platforms/timsp430>`_
|
||||
* `TI TIVA C <http://platformio.org/platforms/titiva>`_
|
||||
|
||||
Frameworks:
|
||||
|
||||
* `Arduino <http://platformio.org/frameworks/arduino>`_
|
||||
* `CMSIS <http://platformio.org/frameworks/cmsis>`_
|
||||
* `Energia <http://platformio.org/frameworks/energia>`_
|
||||
* `libOpenCM3 <http://platformio.org/frameworks/libopencm3>`_
|
||||
* `mbed <http://platformio.org/frameworks/mbed>`_
|
||||
* `Simba <http://platformio.org/frameworks/simba>`_
|
||||
* `SPL <http://platformio.org/frameworks/spl>`_
|
||||
* `WiringPi <http://platformio.org/frameworks/wiringpi>`_
|
||||
|
||||
For further details, please refer to `What is PlatformIO? <http://docs.platformio.org/en/stable/faq.html#what-is-platformio>`_
|
||||
* `Arduino <https://platformio.org/frameworks/arduino?utm_source=github&utm_medium=core>`_
|
||||
* `ARTIK SDK <https://platformio.org/frameworks/artik-sdk?utm_source=github&utm_medium=core>`_
|
||||
* `CMSIS <https://platformio.org/frameworks/cmsis?utm_source=github&utm_medium=core>`_
|
||||
* `Energia <https://platformio.org/frameworks/energia?utm_source=github&utm_medium=core>`_
|
||||
* `ESP-IDF <https://platformio.org/frameworks/espidf?utm_source=github&utm_medium=core>`_
|
||||
* `ESP8266 Non-OS SDK <https://platformio.org/frameworks/esp8266-nonos-sdk?utm_source=github&utm_medium=core>`_
|
||||
* `ESP8266 RTOS SDK <https://platformio.org/frameworks/esp8266-rtos-sdk?utm_source=github&utm_medium=core>`_
|
||||
* `libOpenCM3 <https://platformio.org/frameworks/libopencm3?utm_source=github&utm_medium=core>`_
|
||||
* `mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
|
||||
* `Pumbaa <https://platformio.org/frameworks/pumbaa?utm_source=github&utm_medium=core>`_
|
||||
* `Simba <https://platformio.org/frameworks/simba?utm_source=github&utm_medium=core>`_
|
||||
* `SPL <https://platformio.org/frameworks/spl?utm_source=github&utm_medium=core>`_
|
||||
* `STM32Cube <https://platformio.org/frameworks/stm32cube?utm_source=github&utm_medium=core>`_
|
||||
* `Tizen RT <https://platformio.org/frameworks/tizenrt?utm_source=github&utm_medium=core>`_
|
||||
* `WiringPi <https://platformio.org/frameworks/wiringpi?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Contributing
|
||||
------------
|
||||
@@ -192,7 +127,7 @@ See `contributing guidelines <https://github.com/platformio/platformio/blob/deve
|
||||
License
|
||||
-------
|
||||
|
||||
Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
|
||||
The PlatformIO is licensed under the permissive Apache 2.0 license,
|
||||
so you can use it in both commercial and personal projects with confidence.
|
||||
|
||||
docs (submodule): updated fad663db0c...21c1cf522c
examples (submodule): updated 0ac639a82b...40bdd9e1b4
    @@ -1,4 +1,4 @@
    # Copyright 2014-present PlatformIO <contact@platformio.org>
    # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    @@ -14,15 +14,17 @@

    import sys

    VERSION = (3, 2, 1)
    VERSION = (3, 6, 0)
    __version__ = ".".join([str(s) for s in VERSION])

    __title__ = "platformio"
    __description__ = ("An open source ecosystem for IoT development. "
    "Cross-platform build system and library manager. "
    "Continuous and IDE integration. "
    "Arduino, ESP8266 and ARM mbed compatible")
    __url__ = "http://platformio.org"
    __description__ = (
    "An open source ecosystem for IoT development. "
    "Cross-platform IDE and unified debugger. "
    "Remote unit testing and firmware updates. "
    "Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
    "FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
    __url__ = "https://platformio.org"

    __author__ = "Ivan Kravets"
    __email__ = "me@ikravets.com"
    @@ -33,7 +35,8 @@ __copyright__ = "Copyright 2014-present PlatformIO"
    __apiurl__ = "https://api.platformio.org"

    if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
    msg = ("PlatformIO version %s does not run under Python version %s.\n"
    msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
    "Minimum supported version is 2.7, please upgrade Python.\n"
    "Python 3 is not yet supported.\n")
    sys.stderr.write(msg % (__version__, sys.version.split()[0]))
    sys.stderr.write(msg % (__version__, sys.version))
    sys.exit(1)
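The hunk above bumps VERSION from (3, 2, 1) to (3, 6, 0) and tightens the interpreter guard so that both older interpreters and Python 3 abort with a message that now embeds the full sys.version string. A minimal, self-contained sketch of that pattern (the names mirror the diff, but this snippet is illustrative, not the shipped module):

```python
# Illustrative sketch of the version-metadata pattern in platformio/__init__.py.
import sys

VERSION = (3, 6, 0)
__version__ = ".".join(str(part) for part in VERSION)  # -> "3.6.0"

# The guard accepts only Python 2.7.x; under Python 3 this prints the
# message and exits, which is exactly what the diff's guard is meant to do.
if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
    sys.stderr.write(
        "PlatformIO Core v%s does not run under Python version %s.\n"
        "Minimum supported version is 2.7, please upgrade Python.\n"
        "Python 3 is not yet supported.\n" % (__version__, sys.version))
    sys.exit(1)
```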
    @@ -1,4 +1,4 @@
    # Copyright 2014-present PlatformIO <contact@platformio.org>
    # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    @@ -12,14 +12,13 @@
    # See the License for the specific language governing permissions and
    # limitations under the License.

    from os import getenv, listdir
    import os
    import sys
    from os.path import join
    from platform import system
    from sys import exit as sys_exit
    from traceback import format_exc

    import click
    import requests

    from platformio import __version__, exception, maintenance
    from platformio.util import get_source_dir
    @@ -29,7 +28,7 @@ class PlatformioCLI(click.MultiCommand): # pylint: disable=R0904

    def list_commands(self, ctx):
    cmds = []
    for filename in listdir(join(get_source_dir(), "commands")):
    for filename in os.listdir(join(get_source_dir(), "commands")):
    if filename.startswith("__init__"):
    continue
    if filename.endswith(".py"):
    @@ -37,16 +36,16 @@ class PlatformioCLI(click.MultiCommand): # pylint: disable=R0904
    cmds.sort()
    return cmds

    def get_command(self, ctx, name):
    def get_command(self, ctx, cmd_name):
    mod = None
    try:
    mod = __import__("platformio.commands." + name, None, None,
    mod = __import__("platformio.commands." + cmd_name, None, None,
    ["cli"])
    except ImportError:
    try:
    return self._handle_obsolate_command(name)
    return self._handle_obsolate_command(cmd_name)
    except AttributeError:
    raise click.UsageError('No such command "%s"' % name, ctx)
    raise click.UsageError('No such command "%s"' % cmd_name, ctx)
    return mod.cli

    @staticmethod
    @@ -81,30 +80,45 @@ def process_result(ctx, result, force, caller): # pylint: disable=W0613
    maintenance.on_platformio_end(ctx, result)


    def configure():
    if "cygwin" in system().lower():
    raise exception.CygwinEnvDetected()

    # https://urllib3.readthedocs.org
    # /en/latest/security.html#insecureplatformwarning
    try:
    import urllib3
    urllib3.disable_warnings()
    except (AttributeError, ImportError):
    pass

    # handle PLATFORMIO_FORCE_COLOR
    if str(os.getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true":
    try:
    # pylint: disable=protected-access
    click._compat.isatty = lambda stream: True
    except: # pylint: disable=bare-except
    pass

    # Handle IOError issue with VSCode's Terminal (Windows)
    click_echo_origin = [click.echo, click.secho]

    def _safe_echo(origin, *args, **kwargs):
    try:
    click_echo_origin[origin](*args, **kwargs)
    except IOError:
    (sys.stderr.write if kwargs.get("err") else
    sys.stdout.write)("%s\n" % (args[0] if args else ""))

    click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
    click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)


    def main():
    try:
    if "cygwin" in system().lower():
    raise exception.CygwinEnvDetected()

    # https://urllib3.readthedocs.org
    # /en/latest/security.html#insecureplatformwarning
    try:
    requests.packages.urllib3.disable_warnings()
    except AttributeError:
    raise exception.PlatformioException(
    "Invalid installation of Python `requests` package`. See "
    "< https://github.com/platformio/platformio-core/issues/252 >")

    # handle PLATFORMIO_FORCE_COLOR
    if str(getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true":
    try:
    # pylint: disable=protected-access
    click._compat.isatty = lambda stream: True
    except: # pylint: disable=bare-except
    pass

    configure()
    cli(None, None, None)
    except Exception as e: # pylint: disable=W0703
    except Exception as e: # pylint: disable=broad-except
    if not isinstance(e, exception.ReturnErrorCode):
    maintenance.on_platformio_exception(e)
    error_str = "Error: "
    @@ -133,5 +147,10 @@ An unexpected error occurred. Further steps:
    return 0


    def debug_gdb_main():
    sys.argv = [sys.argv[0], "debug", "--interface", "gdb"] + sys.argv[1:]
    return main()


    if __name__ == "__main__":
    sys_exit(main())
    sys.exit(main())
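The configure() function introduced in this hunk applies two console tweaks before the CLI runs: it forces click's color detection on when PLATFORMIO_FORCE_COLOR=true, and it wraps click.echo/click.secho so an IOError from a problematic terminal (the diff's comment names VSCode's terminal on Windows) degrades to a plain stream write instead of crashing. A condensed, self-contained sketch of both patterns follows; it mirrors the diff but is an illustration, not the shipped module, and it relies on the click internal _compat.isatty just as the diff does.

```python
# Sketch of the two console tweaks configure() makes above (illustrative).
import os
import sys

import click


def force_color_if_requested():
    # PLATFORMIO_FORCE_COLOR=true makes click treat every stream as a TTY,
    # so ANSI colors survive pipes and IDE terminals.
    if str(os.getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true":
        try:
            # pylint: disable=protected-access
            click._compat.isatty = lambda stream: True
        except Exception:
            pass


def install_safe_echo():
    # If click.echo/secho raise IOError on a broken terminal, fall back to a
    # bare write on the matching stream instead of aborting the command.
    originals = [click.echo, click.secho]

    def _safe_echo(index, *args, **kwargs):
        try:
            originals[index](*args, **kwargs)
        except IOError:
            stream = sys.stderr if kwargs.get("err") else sys.stdout
            stream.write("%s\n" % (args[0] if args else ""))

    click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
    click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)
```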
    @@ -1,4 +1,4 @@
    # Copyright 2014-present PlatformIO <contact@platformio.org>
    # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
@@ -12,22 +12,39 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import codecs
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
from copy import deepcopy
|
||||
from os import environ, getenv, listdir, remove
|
||||
from os.path import dirname, getmtime, isdir, isfile, join
|
||||
from os.path import abspath, dirname, expanduser, isdir, isfile, join
|
||||
from time import time
|
||||
|
||||
import requests
|
||||
from lockfile import LockFailed, LockFile
|
||||
|
||||
from platformio import __version__, exception, util
|
||||
from platformio.exception import InvalidSettingName, InvalidSettingValue
|
||||
from platformio import exception, lockfile, util
|
||||
|
||||
|
||||
def projects_dir_validate(projects_dir):
|
||||
assert isdir(projects_dir)
|
||||
return abspath(projects_dir)
|
||||
|
||||
|
||||
DEFAULT_SETTINGS = {
|
||||
"auto_update_libraries": {
|
||||
"description": "Automatically update libraries (Yes/No)",
|
||||
"value": False
|
||||
},
|
||||
"auto_update_platforms": {
|
||||
"description": "Automatically update platforms (Yes/No)",
|
||||
"value": False
|
||||
},
|
||||
"check_libraries_interval": {
|
||||
"description": "Check for the library updates interval (days)",
|
||||
"value": 7
|
||||
},
|
||||
"check_platformio_interval": {
|
||||
"description": "Check for the new PlatformIO interval (days)",
|
||||
"value": 3
|
||||
@@ -36,36 +53,30 @@ DEFAULT_SETTINGS = {
|
||||
"description": "Check for the platform updates interval (days)",
|
||||
"value": 7
|
||||
},
|
||||
"check_libraries_interval": {
|
||||
"description": "Check for the library updates interval (days)",
|
||||
"value": 7
|
||||
},
|
||||
"auto_update_platforms": {
|
||||
"description": "Automatically update platforms (Yes/No)",
|
||||
"value": False
|
||||
},
|
||||
"auto_update_libraries": {
|
||||
"description": "Automatically update libraries (Yes/No)",
|
||||
"value": False
|
||||
},
|
||||
"force_verbose": {
|
||||
"description": "Force verbose output when processing environments",
|
||||
"value": False
|
||||
"enable_cache": {
|
||||
"description": "Enable caching for API requests and Library Manager",
|
||||
"value": True
|
||||
},
|
||||
"enable_ssl": {
|
||||
"description": "Enable SSL for PlatformIO Services",
|
||||
"value": False
|
||||
},
|
||||
"enable_cache": {
|
||||
"description": "Enable caching for API requests and Library Manager",
|
||||
"value": True
|
||||
},
|
||||
"enable_telemetry": {
|
||||
"description":
|
||||
("Telemetry service <http://docs.platformio.org/page/"
|
||||
"userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
|
||||
"value": True
|
||||
}
|
||||
"value":
|
||||
True
|
||||
},
|
||||
"force_verbose": {
|
||||
"description": "Force verbose output when processing environments",
|
||||
"value": False
|
||||
},
|
||||
"projects_dir": {
|
||||
"description": "Default location for PlatformIO projects (PIO Home)",
|
||||
"value": join(expanduser("~"), "Documents", "PlatformIO", "Projects"),
|
||||
"validator": projects_dir_validate
|
||||
},
|
||||
}
|
||||
|
||||
SESSION_VARS = {"command_ctx": None, "force_option": False, "caller_id": None}
|
||||
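The reshuffled DEFAULT_SETTINGS table above keeps one dict per setting with a description, a default value, and, for the new projects_dir entry, an optional validator callable (projects_dir_validate). The sketch below is a hypothetical reader for a table shaped like that; the environment-variable override convention and the omission of persisted state are assumptions of the sketch, not guaranteed behaviour of app.get_setting().

```python
# Hypothetical helper for a DEFAULT_SETTINGS-shaped table; app.get_setting()
# in platformio/app.py also consults persisted state, which is skipped here.
import os


def read_default_setting(defaults, name):
    if name not in defaults:
        raise KeyError("Invalid setting name: %s" % name)
    entry = defaults[name]
    # Assumed convention for this sketch: an environment variable such as
    # PLATFORMIO_SETTING_ENABLE_CACHE overrides the shipped default.
    env_value = os.getenv("PLATFORMIO_SETTING_%s" % name.upper())
    value = env_value if env_value is not None else entry["value"]
    # Per-setting validator, e.g. projects_dir_validate above.
    validator = entry.get("validator")
    return validator(value) if validator else value
```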
@@ -87,44 +98,36 @@ class State(object):
|
||||
self._lock_state_file()
|
||||
if isfile(self.path):
|
||||
self._state = util.load_json(self.path)
|
||||
except ValueError:
|
||||
except exception.PlatformioException:
|
||||
self._state = {}
|
||||
self._prev_state = deepcopy(self._state)
|
||||
return self._state
|
||||
|
||||
def __exit__(self, type_, value, traceback):
|
||||
if self._prev_state != self._state:
|
||||
with open(self.path, "w") as fp:
|
||||
if "dev" in __version__:
|
||||
json.dump(self._state, fp, indent=4)
|
||||
else:
|
||||
try:
|
||||
with codecs.open(self.path, "w", encoding="utf8") as fp:
|
||||
json.dump(self._state, fp)
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(util.get_home_dir())
|
||||
self._unlock_state_file()
|
||||
|
||||
def _lock_state_file(self):
|
||||
if not self.lock:
|
||||
return
|
||||
self._lockfile = LockFile(self.path)
|
||||
|
||||
if self._lockfile.is_locked() and \
|
||||
(time() - getmtime(self._lockfile.lock_file)) > 10:
|
||||
self._lockfile.break_lock()
|
||||
|
||||
self._lockfile = lockfile.LockFile(self.path)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except LockFailed:
|
||||
raise exception.PlatformioException(
|
||||
"The directory `{0}` or its parent directory is not owned by "
|
||||
"the current user and PlatformIO can not store configuration "
|
||||
"data. \nPlease check the permissions and owner of that "
|
||||
"directory. Otherwise, please remove manually `{0}` "
|
||||
"directory and PlatformIO will create new from the current "
|
||||
"user.".format(dirname(self.path)))
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(dirname(self.path))
|
||||
|
||||
def _unlock_state_file(self):
|
||||
if self._lockfile:
|
||||
self._lockfile.release()
|
||||
|
||||
def __del__(self):
|
||||
self._unlock_state_file()
|
||||
|
||||
|
||||
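The State hunk above swaps the third-party lockfile.LockFile for the project's own platformio.lockfile helper and turns locking or write failures into HomeDirPermissionsError. A hedged usage sketch follows; the constructor argument shown (lock=True) is an assumption inferred from the attributes the hunk touches (self.path, self.lock), not a documented signature.

```python
# Hypothetical use of the State context manager changed above.
from platformio import app


def remember_last_check(timestamp):
    # __enter__ returns the backing dict; on __exit__ the JSON file is
    # rewritten (and the lock released) only if the dict actually changed.
    with app.State(lock=True) as state:
        state["last_check"] = timestamp
```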
class ContentCache(object):
|
||||
|
||||
@@ -133,53 +136,23 @@ class ContentCache(object):
|
||||
self._db_path = None
|
||||
self._lockfile = None
|
||||
|
||||
if not get_setting("enable_cache"):
|
||||
return
|
||||
|
||||
self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache")
|
||||
if not self.cache_dir:
|
||||
os.makedirs(self.cache_dir)
|
||||
self.cache_dir = cache_dir or util.get_cache_dir()
|
||||
self._db_path = join(self.cache_dir, "db.data")
|
||||
|
||||
def __enter__(self):
|
||||
if not self._db_path or not isfile(self._db_path):
|
||||
return self
|
||||
found = False
|
||||
newlines = []
|
||||
with open(self._db_path) as fp:
|
||||
for line in fp.readlines():
|
||||
if "=" not in line:
|
||||
continue
|
||||
line = line.strip()
|
||||
expire, path = line.split("=")
|
||||
if time() < int(expire):
|
||||
newlines.append(line)
|
||||
continue
|
||||
found = True
|
||||
if isfile(path):
|
||||
remove(path)
|
||||
if not len(listdir(dirname(path))):
|
||||
util.rmtree_(dirname(path))
|
||||
|
||||
if found and self._lock_dbindex():
|
||||
with open(self._db_path, "w") as fp:
|
||||
fp.write("\n".join(newlines) + "\n")
|
||||
self._unlock_dbindex()
|
||||
|
||||
self.delete()
|
||||
return self
|
||||
|
||||
def __exit__(self, type_, value, traceback):
|
||||
pass
|
||||
|
||||
def _lock_dbindex(self):
|
||||
self._lockfile = LockFile(self.cache_dir)
|
||||
if self._lockfile.is_locked() and \
|
||||
(time() - getmtime(self._lockfile.lock_file)) > 10:
|
||||
self._lockfile.break_lock()
|
||||
|
||||
if not self.cache_dir:
|
||||
os.makedirs(self.cache_dir)
|
||||
self._lockfile = lockfile.LockFile(self.cache_dir)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except LockFailed:
|
||||
except: # pylint: disable=bare-except
|
||||
return False
|
||||
|
||||
return True
|
||||
@@ -187,6 +160,7 @@ class ContentCache(object):
|
||||
def _unlock_dbindex(self):
|
||||
if self._lockfile:
|
||||
self._lockfile.release()
|
||||
return True
|
||||
|
||||
def get_cache_path(self, key):
|
||||
assert len(key) > 3
|
||||
@@ -200,63 +174,108 @@ class ContentCache(object):
|
||||
return h.hexdigest()
|
||||
|
||||
def get(self, key):
|
||||
if not self.cache_dir:
|
||||
return None
|
||||
cache_path = self.get_cache_path(key)
|
||||
if not isfile(cache_path):
|
||||
return None
|
||||
with open(cache_path, "rb") as fp:
|
||||
data = fp.read()
|
||||
if data[0] in ("{", "["):
|
||||
return json.loads(data)
|
||||
return data
|
||||
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
|
||||
return fp.read()
|
||||
|
||||
def set(self, key, data, valid):
|
||||
if not self.cache_dir or not data:
|
||||
return
|
||||
if not get_setting("enable_cache"):
|
||||
return False
|
||||
cache_path = self.get_cache_path(key)
|
||||
if isfile(cache_path):
|
||||
self.delete(key)
|
||||
if not data:
|
||||
return False
|
||||
if not isdir(self.cache_dir):
|
||||
os.makedirs(self.cache_dir)
|
||||
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
|
||||
assert valid.endswith(tuple(tdmap.keys()))
|
||||
cache_path = self.get_cache_path(key)
|
||||
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
|
||||
|
||||
if not self._lock_dbindex():
|
||||
return False
|
||||
with open(self._db_path, "a") as fp:
|
||||
fp.write("%s=%s\n" % (str(expire_time), cache_path))
|
||||
self._unlock_dbindex()
|
||||
|
||||
if not isdir(dirname(cache_path)):
|
||||
os.makedirs(dirname(cache_path))
|
||||
with open(cache_path, "wb") as fp:
|
||||
if isinstance(data, dict) or isinstance(data, list):
|
||||
json.dump(data, fp)
|
||||
else:
|
||||
fp.write(str(data))
|
||||
try:
|
||||
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
with open(self._db_path, "a") as fp:
|
||||
fp.write("%s=%s\n" % (str(expire_time), cache_path))
|
||||
except UnicodeError:
|
||||
if isfile(cache_path):
|
||||
try:
|
||||
remove(cache_path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
return self._unlock_dbindex()
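Note: the expiry bookkeeping above encodes a lifetime such as "1h" or "2d" as a unit suffix plus a multiplier, and records `expire=path` rows in db.data. A minimal standalone sketch of that conversion, with illustrative names that are not part of the PlatformIO API:

from time import time

TDMAP = {"s": 1, "m": 60, "h": 3600, "d": 86400}

def expire_timestamp(valid):
    # "2d" -> now + 2 * 86400 seconds; the suffix character selects the unit
    assert valid.endswith(tuple(TDMAP))
    return int(time() + TDMAP[valid[-1]] * int(valid[:-1]))

# expire_timestamp("30m") is roughly time() + 1800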
|
||||
|
||||
def delete(self, keys=None):
|
||||
""" Keys=None, delete expired items """
|
||||
if not isfile(self._db_path):
|
||||
return None
|
||||
if not keys:
|
||||
keys = []
|
||||
if not isinstance(keys, list):
|
||||
keys = [keys]
|
||||
paths_for_delete = [self.get_cache_path(k) for k in keys]
|
||||
found = False
|
||||
newlines = []
|
||||
with open(self._db_path) as fp:
|
||||
for line in fp.readlines():
|
||||
if "=" not in line:
|
||||
continue
|
||||
line = line.strip()
|
||||
expire, path = line.split("=")
|
||||
if time() < int(expire) and isfile(path) and \
|
||||
path not in paths_for_delete:
|
||||
newlines.append(line)
|
||||
continue
|
||||
found = True
|
||||
if isfile(path):
|
||||
try:
|
||||
remove(path)
|
||||
if not listdir(dirname(path)):
|
||||
util.rmtree_(dirname(path))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if found and self._lock_dbindex():
|
||||
with open(self._db_path, "w") as fp:
|
||||
fp.write("\n".join(newlines) + "\n")
|
||||
self._unlock_dbindex()
|
||||
|
||||
return True
|
||||
|
||||
def clean(self):
|
||||
if self.cache_dir and isdir(self.cache_dir):
|
||||
util.rmtree_(self.cache_dir)
|
||||
if not self.cache_dir or not isdir(self.cache_dir):
|
||||
return
|
||||
util.rmtree_(self.cache_dir)
|
||||
|
||||
|
||||
def clean_cache():
|
||||
with ContentCache() as cc:
|
||||
cc.clean()
|
||||
|
||||
|
||||
def sanitize_setting(name, value):
|
||||
if name not in DEFAULT_SETTINGS:
|
||||
raise InvalidSettingName(name)
|
||||
raise exception.InvalidSettingName(name)
|
||||
|
||||
defdata = DEFAULT_SETTINGS[name]
|
||||
try:
|
||||
if "validator" in defdata:
|
||||
value = defdata['validator']()
|
||||
value = defdata['validator'](value)
|
||||
elif isinstance(defdata['value'], bool):
|
||||
if not isinstance(value, bool):
|
||||
value = str(value).lower() in ("true", "yes", "y", "1")
|
||||
elif isinstance(defdata['value'], int):
|
||||
value = int(value)
|
||||
except Exception:
|
||||
raise InvalidSettingValue(value, name)
|
||||
raise exception.InvalidSettingValue(value, name)
|
||||
return value
|
||||
|
||||
|
||||
@@ -270,6 +289,12 @@ def set_state_item(name, value):
|
||||
data[name] = value
|
||||
|
||||
|
||||
def delete_state_item(name):
|
||||
with State(lock=True) as data:
|
||||
if name in data:
|
||||
del data[name]
|
||||
|
||||
|
||||
def get_setting(name):
|
||||
_env_name = "PLATFORMIO_SETTING_%s" % name.upper()
|
||||
if _env_name in environ:
|
||||
@@ -306,7 +331,8 @@ def set_session_var(name, value):
|
||||
|
||||
def is_disabled_progressbar():
|
||||
return any([
|
||||
get_session_var("force_option"), util.is_ci(),
|
||||
get_session_var("force_option"),
|
||||
util.is_ci(),
|
||||
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
|
||||
])
|
||||
|
||||
@@ -325,7 +351,9 @@ def get_cid():
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
cid = str(
|
||||
uuid.UUID(bytes=hashlib.md5(
|
||||
str(_uid if _uid else uuid.getnode())).digest()))
|
||||
set_state_item("cid", cid)
|
||||
uuid.UUID(
|
||||
bytes=hashlib.md5(str(_uid if _uid else uuid.getnode()))
|
||||
.digest()))
|
||||
if "windows" in util.get_systype() or os.getuid() > 0:
|
||||
set_state_item("cid", cid)
|
||||
return cid
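For context, the cid logic above reduces to hashing a user or machine identifier into a stable UUID string; a rough sketch under that reading (the helper name is illustrative, not PlatformIO's):

import hashlib
import uuid

def machine_cid(uid=None):
    # Fall back to the MAC address (uuid.getnode()) when no uid is given,
    # then shape the 16-byte MD5 digest into a UUID string.
    raw = str(uid if uid else uuid.getnode()).encode("utf-8")
    return str(uuid.UUID(bytes=hashlib.md5(raw).digest()))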
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -14,12 +14,13 @@
|
||||
|
||||
import base64
|
||||
import json
|
||||
import sys
|
||||
from os import environ
|
||||
from os.path import join
|
||||
from os.path import expanduser, join
|
||||
from time import time
|
||||
|
||||
from SCons.Script import (ARGUMENTS, COMMAND_LINE_TARGETS, DEFAULT_TARGETS,
|
||||
AllowSubstExceptions, AlwaysBuild,
|
||||
AllowSubstExceptions, AlwaysBuild, Default,
|
||||
DefaultEnvironment, Variables)
|
||||
|
||||
from platformio import util
|
||||
@@ -31,7 +32,7 @@ commonvars = Variables(None)
|
||||
commonvars.AddVariables(
|
||||
("PLATFORM_MANIFEST",),
|
||||
("BUILD_SCRIPT",),
|
||||
("EXTRA_SCRIPT",),
|
||||
("EXTRA_SCRIPTS",),
|
||||
("PIOENV",),
|
||||
("PIOTEST",),
|
||||
("PIOPLATFORM",),
|
||||
@@ -49,26 +50,43 @@ commonvars.AddVariables(
|
||||
("LIB_DEPS",),
|
||||
("LIB_IGNORE",),
|
||||
("LIB_EXTRA_DIRS",),
|
||||
("LIB_ARCHIVE",),
|
||||
|
||||
# board options
|
||||
("BOARD",),
|
||||
# deprecated options, use board_{object.path} instead
|
||||
("BOARD_MCU",),
|
||||
("BOARD_F_CPU",),
|
||||
("BOARD_F_FLASH",),
|
||||
("BOARD_FLASH_MODE",),
|
||||
# end of deprecated options
|
||||
|
||||
# upload options
|
||||
("UPLOAD_PORT",),
|
||||
("UPLOAD_PROTOCOL",),
|
||||
("UPLOAD_SPEED",),
|
||||
("UPLOAD_FLAGS",),
|
||||
("UPLOAD_RESETMETHOD",)
|
||||
("UPLOAD_RESETMETHOD",),
|
||||
|
||||
# test options
|
||||
("TEST_BUILD_PROJECT_SRC",),
|
||||
|
||||
# debug options
|
||||
("DEBUG_TOOL",),
|
||||
("DEBUG_SVD_PATH",),
|
||||
|
||||
) # yapf: disable
|
||||
|
||||
MULTILINE_VARS = [
|
||||
"EXTRA_SCRIPTS", "PIOFRAMEWORK", "BUILD_FLAGS", "SRC_BUILD_FLAGS",
|
||||
"BUILD_UNFLAGS", "UPLOAD_FLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE",
|
||||
"LIB_EXTRA_DIRS"
|
||||
]
|
||||
|
||||
DEFAULT_ENV_OPTIONS = dict(
|
||||
tools=[
|
||||
"ar", "as", "gcc", "g++", "gnulink", "platformio", "pioplatform",
|
||||
"piowinhooks", "piolib", "piotest", "pioupload", "piomisc"
|
||||
"ar", "gas", "gcc", "g++", "gnulink", "platformio", "pioplatform",
|
||||
"piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
|
||||
], # yapf: disable
|
||||
toolpath=[join(util.get_source_dir(), "builder", "tools")],
|
||||
variables=commonvars,
|
||||
@@ -77,20 +95,24 @@ DEFAULT_ENV_OPTIONS = dict(
|
||||
PIOVARIABLES=commonvars.keys(),
|
||||
ENV=environ,
|
||||
UNIX_TIME=int(time()),
|
||||
PROGNAME="program",
|
||||
PIOHOME_DIR=util.get_home_dir(),
|
||||
PROJECT_DIR=util.get_project_dir(),
|
||||
PROJECTINCLUDE_DIR=util.get_projectinclude_dir(),
|
||||
PROJECTSRC_DIR=util.get_projectsrc_dir(),
|
||||
PROJECTTEST_DIR=util.get_projecttest_dir(),
|
||||
PROJECTDATA_DIR=util.get_projectdata_dir(),
|
||||
PROJECTPIOENVS_DIR=util.get_projectpioenvs_dir(),
|
||||
BUILD_DIR=join("$PROJECTPIOENVS_DIR", "$PIOENV"),
|
||||
PROJECTBUILD_DIR=util.get_projectbuild_dir(),
|
||||
BUILD_DIR=join("$PROJECTBUILD_DIR", "$PIOENV"),
|
||||
BUILDSRC_DIR=join("$BUILD_DIR", "src"),
|
||||
BUILDTEST_DIR=join("$BUILD_DIR", "test"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
LIBSOURCE_DIRS=[
|
||||
util.get_projectlib_dir(), util.get_projectlibdeps_dir(),
|
||||
util.get_projectlib_dir(),
|
||||
util.get_projectlibdeps_dir(),
|
||||
join("$PIOHOME_DIR", "lib")
|
||||
],
|
||||
PROGNAME="program",
|
||||
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PYTHONEXE=util.get_pythonexe_path())
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
@@ -106,6 +128,8 @@ env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
|
||||
for k in commonvars.keys():
|
||||
if k in env:
|
||||
env[k] = base64.b64decode(env[k])
|
||||
if k in MULTILINE_VARS:
|
||||
env[k] = util.parse_conf_multi_values(env[k])
|
||||
|
||||
if env.GetOption('clean'):
|
||||
env.PioClean(env.subst("$BUILD_DIR"))
|
||||
@@ -114,45 +138,77 @@ elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print "Verbose mode can be enabled via `-v, --verbose` option"
|
||||
|
||||
# Handle custom variables from system environment
|
||||
for var in ("BUILD_FLAGS", "SRC_BUILD_FLAGS", "SRC_FILTER", "EXTRA_SCRIPT",
|
||||
for var in ("BUILD_FLAGS", "SRC_BUILD_FLAGS", "SRC_FILTER", "EXTRA_SCRIPTS",
|
||||
"UPLOAD_PORT", "UPLOAD_FLAGS", "LIB_EXTRA_DIRS"):
|
||||
k = "PLATFORMIO_%s" % var
|
||||
if environ.get(k):
|
||||
env[var] = environ.get(k)
|
||||
|
||||
# Parse comma separated items
|
||||
for opt in ("PIOFRAMEWORK", "LIB_DEPS", "LIB_IGNORE", "LIB_EXTRA_DIRS"):
|
||||
if opt not in env:
|
||||
if k not in environ:
|
||||
continue
|
||||
env[opt] = [l.strip() for l in env[opt].split(", ") if l.strip()]
|
||||
if var in ("UPLOAD_PORT", ):
|
||||
env[var] = environ.get(k)
|
||||
continue
|
||||
env.Append(**{var: util.parse_conf_multi_values(environ.get(k))})
|
||||
|
||||
# Configure extra library source directories for LDF
|
||||
if util.get_project_optional_dir("lib_extra_dirs"):
|
||||
env.Prepend(LIBSOURCE_DIRS=[
|
||||
l.strip()
|
||||
for l in util.get_project_optional_dir("lib_extra_dirs").split(", ")
|
||||
if l.strip()
|
||||
])
|
||||
env.Prepend(
|
||||
LIBSOURCE_DIRS=util.parse_conf_multi_values(
|
||||
util.get_project_optional_dir("lib_extra_dirs")))
|
||||
env.Prepend(LIBSOURCE_DIRS=env.get("LIB_EXTRA_DIRS", []))
|
||||
env['LIBSOURCE_DIRS'] = [
|
||||
expanduser(d) if d.startswith("~") else d for d in env['LIBSOURCE_DIRS']
|
||||
]
|
||||
|
||||
env.LoadPioPlatform(commonvars)
|
||||
|
||||
env.SConscriptChdir(0)
|
||||
env.SConsignFile(join("$PROJECTPIOENVS_DIR", ".sconsign.dblite"))
|
||||
env.SConsignFile(join("$PROJECTBUILD_DIR", ".sconsign.dblite"))
|
||||
|
||||
for item in env.GetExtraScripts("pre"):
|
||||
env.SConscript(item, exports="env")
|
||||
|
||||
env.SConscript("$BUILD_SCRIPT")
|
||||
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS + ["size"]))
|
||||
|
||||
if "UPLOAD_FLAGS" in env:
|
||||
env.Append(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
|
||||
env.Prepend(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
|
||||
|
||||
if env.get("EXTRA_SCRIPT"):
|
||||
env.SConscript(env.get("EXTRA_SCRIPT"), exports="env")
|
||||
for item in env.GetExtraScripts("post"):
|
||||
env.SConscript(item, exports="env")
|
||||
|
||||
##############################################################################
|
||||
|
||||
# Checking program size
|
||||
if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.Depends(["upload", "program"], "checkprogsize")
|
||||
# Replace platform's "size" target with our
|
||||
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
|
||||
Default(None)
|
||||
Default(_new_targets)
|
||||
Default("checkprogsize")
|
||||
|
||||
# Print configured protocols
|
||||
env.AddPreAction(
|
||||
["upload", "program"],
|
||||
env.VerboseAction(lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol..."))
|
||||
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
|
||||
|
||||
##############################################################################
|
||||
|
||||
if "envdump" in COMMAND_LINE_TARGETS:
|
||||
print env.Dump()
|
||||
env.Exit(0)
|
||||
|
||||
if "idedata" in COMMAND_LINE_TARGETS:
|
||||
print "\n%s\n" % json.dumps(env.DumpIDEData())
|
||||
env.Exit(0)
|
||||
try:
|
||||
print "\n%s\n" % util.path_to_unicode(
|
||||
json.dumps(env.DumpIDEData(), ensure_ascii=False))
|
||||
env.Exit(0)
|
||||
except UnicodeDecodeError:
|
||||
sys.stderr.write(
|
||||
"\nUnicodeDecodeError: Non-ASCII characters found in build "
|
||||
"environment\n"
|
||||
"See explanation in FAQ > Troubleshooting > Building\n"
|
||||
"http://docs.platformio.org/page/faq.html\n\n")
|
||||
env.Exit(1)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
190  platformio/builder/tools/pioide.py  Normal file
@@ -0,0 +1,190 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from glob import glob
|
||||
from os import environ
|
||||
from os.path import abspath, isfile, join
|
||||
|
||||
from SCons.Defaults import processDefines
|
||||
|
||||
from platformio import util
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
|
||||
|
||||
def _dump_includes(env):
|
||||
includes = []
|
||||
|
||||
for item in env.get("CPPPATH", []):
|
||||
includes.append(env.subst(item))
|
||||
|
||||
# installed libs
|
||||
for lb in env.GetLibBuilders():
|
||||
includes.extend(lb.get_include_dirs())
|
||||
|
||||
# includes from toolchains
|
||||
p = env.PioPlatform()
|
||||
for name in p.get_installed_packages():
|
||||
if p.get_package_type(name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = util.glob_escape(p.get_package_dir(name))
|
||||
toolchain_incglobs = [
|
||||
join(toolchain_dir, "*", "include*"),
|
||||
join(toolchain_dir, "*", "include", "c++", "*"),
|
||||
join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
|
||||
join(toolchain_dir, "lib", "gcc", "*", "*", "include*")
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
includes.extend(glob(g))
|
||||
|
||||
unity_dir = get_core_package_dir("tool-unity")
|
||||
if unity_dir:
|
||||
includes.append(unity_dir)
|
||||
|
||||
# remove duplicates
|
||||
result = []
|
||||
for item in includes:
|
||||
if item not in result:
|
||||
result.append(abspath(item))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _get_gcc_defines(env):
|
||||
items = []
|
||||
try:
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command(
|
||||
"echo | %s -dM -E -" % env.subst("$CC"), env=sysenv, shell=True)
|
||||
except OSError:
|
||||
return items
|
||||
if result['returncode'] != 0:
|
||||
return items
|
||||
for line in result['out'].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
continue
|
||||
if len(tokens) > 2:
|
||||
items.append("%s=%s" % (tokens[1], tokens[2]))
|
||||
else:
|
||||
items.append(tokens[1])
|
||||
return items
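The parser above walks `gcc -dM -E` output and flattens each predefine into either NAME or NAME=VALUE; a small illustration of the same loop with hypothetical sample lines:

sample = "#define __GNUC__ 5\n#define __ELF__\n"
items = []
for line in sample.split("\n"):
    tokens = line.strip().split(" ", 2)
    if not tokens or tokens[0] != "#define":
        continue
    if len(tokens) > 2:
        items.append("%s=%s" % (tokens[1], tokens[2]))
    else:
        items.append(tokens[1])
# items == ["__GNUC__=5", "__ELF__"]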
|
||||
|
||||
|
||||
def _dump_defines(env):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in processDefines(env.get("CPPDEFINES", [])):
|
||||
defines.append(env.subst(item).replace('\\', ''))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env['PIOPLATFORM'] == "atmelavr":
|
||||
board_mcu = env.get("BOARD_MCU")
|
||||
if not board_mcu and "BOARD" in env:
|
||||
board_mcu = env.BoardConfig().get("build.mcu")
|
||||
if board_mcu:
|
||||
defines.append(
|
||||
str("__AVR_%s__" % board_mcu.upper()
|
||||
.replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
|
||||
|
||||
# built-in GCC macros
|
||||
# if env.GetCompilerType() == "gcc":
|
||||
# defines.extend(_get_gcc_defines(env))
|
||||
|
||||
return defines
|
||||
|
||||
|
||||
def _get_svd_path(env):
|
||||
svd_path = env.subst("$DEBUG_SVD_PATH")
|
||||
if svd_path:
|
||||
return abspath(svd_path)
|
||||
|
||||
if "BOARD" not in env:
|
||||
return None
|
||||
try:
|
||||
svd_path = env.BoardConfig().get("debug.svd_path")
|
||||
assert svd_path
|
||||
except (AssertionError, KeyError):
|
||||
return None
|
||||
# custom path to SVD file
|
||||
if isfile(svd_path):
|
||||
return svd_path
|
||||
# default file from ./platform/misc/svd folder
|
||||
p = env.PioPlatform()
|
||||
if isfile(join(p.get_dir(), "misc", "svd", svd_path)):
|
||||
return abspath(join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return None
|
||||
|
||||
|
||||
def DumpIDEData(env):
|
||||
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
|
||||
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
|
||||
|
||||
data = {
|
||||
"libsource_dirs":
|
||||
[env.subst(l) for l in env.get("LIBSOURCE_DIRS", [])],
|
||||
"defines":
|
||||
_dump_defines(env),
|
||||
"includes":
|
||||
_dump_includes(env),
|
||||
"cc_flags":
|
||||
env.subst(LINTCCOM),
|
||||
"cxx_flags":
|
||||
env.subst(LINTCXXCOM),
|
||||
"cc_path":
|
||||
util.where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
"cxx_path":
|
||||
util.where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
"gdb_path":
|
||||
util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
"prog_path":
|
||||
env.subst("$PROG_PATH"),
|
||||
"flash_extra_images": [{
|
||||
"offset": item[0],
|
||||
"path": env.subst(item[1])
|
||||
} for item in env.get("FLASH_EXTRA_IMAGES", [])],
|
||||
"svd_path":
|
||||
_get_svd_path(env),
|
||||
"compiler_type":
|
||||
env.GetCompilerType()
|
||||
}
|
||||
|
||||
env_ = env.Clone()
|
||||
# https://github.com/platformio/platformio-atom-ide/issues/34
|
||||
_new_defines = []
|
||||
for item in processDefines(env_.get("CPPDEFINES", [])):
|
||||
item = item.replace('\\"', '"')
|
||||
if " " in item:
|
||||
_new_defines.append(item.replace(" ", "\\\\ "))
|
||||
else:
|
||||
_new_defines.append(item)
|
||||
env_.Replace(CPPDEFINES=_new_defines)
|
||||
|
||||
data.update({
|
||||
"cc_flags": env_.subst(LINTCCOM),
|
||||
"cxx_flags": env_.subst(LINTCXXCOM)
|
||||
})
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(DumpIDEData)
|
||||
return env
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -17,17 +17,20 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import sys
|
||||
from os.path import basename, commonprefix, isdir, isfile, join, realpath, sep
|
||||
from platform import system
|
||||
from glob import glob
|
||||
from os.path import (basename, commonprefix, dirname, isdir, isfile, join,
|
||||
realpath, sep)
|
||||
|
||||
import SCons.Scanner
|
||||
from SCons.Script import ARGUMENTS, DefaultEnvironment
|
||||
from SCons.Script import ARGUMENTS, COMMAND_LINE_TARGETS, DefaultEnvironment
|
||||
|
||||
from platformio import util
|
||||
from platformio import exception, util
|
||||
from platformio.builder.tools import platformio as piotool
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.managers.package import PackageManager
|
||||
|
||||
|
||||
class LibBuilderFactory(object):
|
||||
@@ -39,25 +42,23 @@ class LibBuilderFactory(object):
|
||||
clsname = "PlatformIOLibBuilder"
|
||||
else:
|
||||
used_frameworks = LibBuilderFactory.get_used_frameworks(env, path)
|
||||
common_frameworks = (set(env.get("PIOFRAMEWORK", [])) &
|
||||
set(used_frameworks))
|
||||
common_frameworks = (
|
||||
set(env.get("PIOFRAMEWORK", [])) & set(used_frameworks))
|
||||
if common_frameworks:
|
||||
clsname = "%sLibBuilder" % list(common_frameworks)[0].title()
|
||||
elif used_frameworks:
|
||||
clsname = "%sLibBuilder" % used_frameworks[0].title()
|
||||
|
||||
obj = getattr(sys.modules[__name__], clsname)(env,
|
||||
path,
|
||||
verbose=verbose)
|
||||
obj = getattr(sys.modules[__name__], clsname)(
|
||||
env, path, verbose=verbose)
|
||||
assert isinstance(obj, LibBuilderBase)
|
||||
return obj
|
||||
|
||||
@staticmethod
|
||||
def get_used_frameworks(env, path):
|
||||
if any([
|
||||
if any(
|
||||
isfile(join(path, fname))
|
||||
for fname in ("library.properties", "keywords.txt")
|
||||
]):
|
||||
for fname in ("library.properties", "keywords.txt")):
|
||||
return ["arduino"]
|
||||
|
||||
if isfile(join(path, "module.json")):
|
||||
@@ -66,7 +67,8 @@ class LibBuilderFactory(object):
|
||||
# check source files
|
||||
for root, _, files in os.walk(path, followlinks=True):
|
||||
for fname in files:
|
||||
if not env.IsFileWithExt(fname, ("c", "cpp", "h", "hpp")):
|
||||
if not env.IsFileWithExt(
|
||||
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT):
|
||||
continue
|
||||
with open(join(root, fname)) as f:
|
||||
content = f.read()
|
||||
@@ -79,12 +81,24 @@ class LibBuilderFactory(object):
|
||||
|
||||
class LibBuilderBase(object):
|
||||
|
||||
IS_WINDOWS = "windows" in util.get_systype()
|
||||
|
||||
LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"]
|
||||
LDF_MODE_DEFAULT = "chain"
|
||||
|
||||
COMPAT_MODES = ["off", "soft", "strict"]
|
||||
COMPAT_MODE_DEFAULT = "soft"
|
||||
|
||||
CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
|
||||
ADVANCED_SCANNER = SCons.Scanner.C.CScanner(advanced=True)
|
||||
INC_DIRS_CACHE = None
|
||||
CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
|
||||
# Max depth of nested includes:
|
||||
# -1 = unlimited
|
||||
# 0 - disabled nesting
|
||||
# >0 - number of allowed nested includes
|
||||
CCONDITIONAL_SCANNER_DEPTH = 99
|
||||
PARSE_SRC_BY_H_NAME = True
|
||||
|
||||
_INCLUDE_DIRS_CACHE = None
|
||||
|
||||
def __init__(self, env, path, manifest=None, verbose=False):
|
||||
self.env = env.Clone()
|
||||
@@ -93,13 +107,14 @@ class LibBuilderBase(object):
|
||||
self.verbose = verbose
|
||||
|
||||
self._manifest = manifest if manifest else self.load_manifest()
|
||||
self._ldf_mode = self.validate_ldf_mode(
|
||||
self.env.get("LIB_LDF_MODE", self.LDF_MODE_DEFAULT))
|
||||
self._is_dependent = False
|
||||
self._is_built = False
|
||||
self._depbuilders = list()
|
||||
self._circular_deps = list()
|
||||
self._scanned_paths = list()
|
||||
self._processed_files = list()
|
||||
|
||||
# reset source filter, could be overridden with extra script
|
||||
self.env['SRC_FILTER'] = ""
|
||||
|
||||
# process extra options and append to build environment
|
||||
self.process_extra_options()
|
||||
@@ -110,7 +125,7 @@ class LibBuilderBase(object):
|
||||
def __contains__(self, path):
|
||||
p1 = self.path
|
||||
p2 = path
|
||||
if system() == "Windows":
|
||||
if self.IS_WINDOWS:
|
||||
p1 = p1.lower()
|
||||
p2 = p2.lower()
|
||||
return commonprefix((p1 + sep, p2)) == p1 + sep
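The containment test above appends the path separator before the commonprefix check so that sibling directories sharing a name prefix do not match; a self-contained sketch of the same idiom (function name is illustrative):

from os.path import commonprefix, sep

def path_contains(parent, child):
    # "/libs/Foo" should contain "/libs/Foo/src/a.cpp"
    # but not "/libs/FooBar/a.cpp" -- the trailing sep prevents that.
    return commonprefix((parent + sep, child)) == parent + sep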
|
||||
@@ -123,6 +138,13 @@ class LibBuilderBase(object):
|
||||
def version(self):
|
||||
return self._manifest.get("version")
|
||||
|
||||
@property
|
||||
def vcs_info(self):
|
||||
items = glob(join(self.path, ".*", PackageManager.SRC_MANIFEST_NAME))
|
||||
if not items:
|
||||
return None
|
||||
return util.load_json(items[0])
|
||||
|
||||
@property
|
||||
def dependencies(self):
|
||||
return LibraryManager.normalize_dependencies(
|
||||
@@ -131,21 +153,35 @@ class LibBuilderBase(object):
|
||||
@property
|
||||
def src_filter(self):
|
||||
return piotool.SRC_FILTER_DEFAULT + [
|
||||
"-<example%s>" % os.sep, "-<examples%s>" % os.sep, "-<test%s>" %
|
||||
os.sep, "-<tests%s>" % os.sep
|
||||
"-<example%s>" % os.sep,
|
||||
"-<examples%s>" % os.sep,
|
||||
"-<test%s>" % os.sep,
|
||||
"-<tests%s>" % os.sep
|
||||
]
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if not all(isdir(join(self.path, d)) for d in ("include", "src")):
|
||||
return None
|
||||
return join(self.path, "include")
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
return (join(self.path, "src")
|
||||
if isdir(join(self.path, "src")) else self.path)
|
||||
|
||||
def get_include_dirs(self):
|
||||
items = [self.src_dir]
|
||||
include_dir = self.include_dir
|
||||
if include_dir and include_dir not in items:
|
||||
items.append(include_dir)
|
||||
return items
|
||||
|
||||
@property
|
||||
def build_dir(self):
|
||||
return join("$BUILD_DIR", "lib", basename(self.path))
|
||||
|
||||
def get_inc_dirs(self):
|
||||
return [self.src_dir]
|
||||
return join("$BUILD_DIR",
|
||||
"lib%s" % hashlib.sha1(self.path).hexdigest()[:3],
|
||||
basename(self.path))
|
||||
|
||||
@property
|
||||
def build_flags(self):
|
||||
@@ -161,23 +197,17 @@ class LibBuilderBase(object):
|
||||
|
||||
@property
|
||||
def lib_archive(self):
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def validate_ldf_mode(mode):
|
||||
if isinstance(mode, basestring):
|
||||
mode = mode.strip().lower()
|
||||
if mode in LibBuilderBase.LDF_MODES:
|
||||
return mode
|
||||
try:
|
||||
return LibBuilderBase.LDF_MODES[int(mode)]
|
||||
except (IndexError, ValueError):
|
||||
pass
|
||||
return LibBuilderBase.LDF_MODE_DEFAULT
|
||||
return self.env.get("LIB_ARCHIVE", "") != "false"
|
||||
|
||||
@property
|
||||
def lib_ldf_mode(self):
|
||||
return self._ldf_mode
|
||||
return self.validate_ldf_mode(
|
||||
self.env.get("LIB_LDF_MODE", self.LDF_MODE_DEFAULT))
|
||||
|
||||
@property
|
||||
def lib_compat_mode(self):
|
||||
return self.validate_compat_mode(
|
||||
self.env.get("LIB_COMPAT_MODE", self.COMPAT_MODE_DEFAULT))
|
||||
|
||||
@property
|
||||
def depbuilders(self):
|
||||
@@ -192,18 +222,28 @@ class LibBuilderBase(object):
|
||||
return self._is_built
|
||||
|
||||
@staticmethod
|
||||
def items_in_list(items, ilist):
|
||||
def validate_ldf_mode(mode):
|
||||
if isinstance(mode, basestring):
|
||||
mode = mode.strip().lower()
|
||||
if mode in LibBuilderBase.LDF_MODES:
|
||||
return mode
|
||||
try:
|
||||
return LibBuilderBase.LDF_MODES[int(mode)]
|
||||
except (IndexError, ValueError):
|
||||
pass
|
||||
return LibBuilderBase.LDF_MODE_DEFAULT
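As shown above, validate_ldf_mode accepts either a mode name or a numeric index into LDF_MODES and falls back to the default otherwise; illustrative calls, assuming LDF_MODES = ["off", "chain", "deep", "chain+", "deep+"] as defined earlier:

LibBuilderBase.validate_ldf_mode("Deep+")  # -> "deep+" (names are lower-cased)
LibBuilderBase.validate_ldf_mode(2)        # -> "deep" (treated as an index)
LibBuilderBase.validate_ldf_mode("bogus")  # -> "chain" (LDF_MODE_DEFAULT)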
|
||||
|
||||
def _items_to_list(items_):
|
||||
if not isinstance(items_, list):
|
||||
items_ = [i.strip() for i in items_.split(",")]
|
||||
return [i.lower() for i in items_ if i]
|
||||
|
||||
items = _items_to_list(items)
|
||||
ilist = _items_to_list(ilist)
|
||||
if "*" in items or "*" in ilist:
|
||||
return True
|
||||
return set(items) & set(ilist)
|
||||
@staticmethod
|
||||
def validate_compat_mode(mode):
|
||||
if isinstance(mode, basestring):
|
||||
mode = mode.strip().lower()
|
||||
if mode in LibBuilderBase.COMPAT_MODES:
|
||||
return mode
|
||||
try:
|
||||
return LibBuilderBase.COMPAT_MODES[int(mode)]
|
||||
except (IndexError, ValueError):
|
||||
pass
|
||||
return LibBuilderBase.COMPAT_MODE_DEFAULT
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
return True
|
||||
@@ -214,24 +254,20 @@ class LibBuilderBase(object):
|
||||
def load_manifest(self):
|
||||
return {}
|
||||
|
||||
def get_src_files(self):
|
||||
return [
|
||||
join(self.src_dir, item)
|
||||
for item in self.env.MatchSourceFiles(self.src_dir,
|
||||
self.src_filter)
|
||||
]
|
||||
|
||||
def process_extra_options(self):
|
||||
with util.cd(self.path):
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
self.env.ProcessFlags(self.build_flags)
|
||||
if self.extra_script:
|
||||
self.env.SConscriptChdir(1)
|
||||
self.env.SConscript(
|
||||
realpath(self.extra_script),
|
||||
exports={"env": self.env,
|
||||
"pio_lib_builder": self})
|
||||
exports={
|
||||
"env": self.env,
|
||||
"pio_lib_builder": self
|
||||
})
|
||||
|
||||
def _process_dependencies(self):
|
||||
def process_dependencies(self):
|
||||
if not self.dependencies:
|
||||
return
|
||||
for item in self.dependencies:
|
||||
@@ -241,7 +277,7 @@ class LibBuilderBase(object):
|
||||
if env_key not in self.env:
|
||||
continue
|
||||
if (key in item and
|
||||
not self.items_in_list(self.env[env_key], item[key])):
|
||||
not util.items_in_list(self.env[env_key], item[key])):
|
||||
if self.verbose:
|
||||
sys.stderr.write("Skip %s incompatible dependency %s\n"
|
||||
% (key[:-1], item))
|
||||
@@ -250,7 +286,7 @@ class LibBuilderBase(object):
|
||||
continue
|
||||
|
||||
found = False
|
||||
for lb in self.envorigin.GetLibBuilders():
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if item['name'] != lb.name:
|
||||
continue
|
||||
elif "frameworks" in item and \
|
||||
@@ -269,56 +305,81 @@ class LibBuilderBase(object):
|
||||
"library\n" % (item['name'], self.name))
|
||||
self.env.Exit(1)
|
||||
|
||||
def _validate_search_paths(self, search_paths=None):
|
||||
if not search_paths:
|
||||
search_paths = []
|
||||
assert isinstance(search_paths, list)
|
||||
def get_search_files(self):
|
||||
items = [
|
||||
join(self.src_dir, item) for item in self.env.MatchSourceFiles(
|
||||
self.src_dir, self.src_filter)
|
||||
]
|
||||
include_dir = self.include_dir
|
||||
if include_dir:
|
||||
items.extend([
|
||||
join(include_dir, item)
|
||||
for item in self.env.MatchSourceFiles(include_dir)
|
||||
])
|
||||
return items
|
||||
|
||||
_search_paths = []
|
||||
for path in search_paths:
|
||||
if path not in self._scanned_paths:
|
||||
_search_paths.append(path)
|
||||
self._scanned_paths.append(path)
|
||||
def _validate_search_files(self, search_files=None):
|
||||
if not search_files:
|
||||
search_files = []
|
||||
assert isinstance(search_files, list)
|
||||
|
||||
return _search_paths
|
||||
_search_files = []
|
||||
for path in search_files:
|
||||
if path not in self._processed_files:
|
||||
_search_files.append(path)
|
||||
self._processed_files.append(path)
|
||||
|
||||
def _get_found_includes(self, search_paths=None):
|
||||
return _search_files
|
||||
|
||||
def _get_found_includes(self, search_files=None):
|
||||
# all include directories
|
||||
if not LibBuilderBase.INC_DIRS_CACHE:
|
||||
inc_dirs = []
|
||||
used_inc_dirs = []
|
||||
for lb in self.envorigin.GetLibBuilders():
|
||||
items = [self.env.Dir(d) for d in lb.get_inc_dirs()]
|
||||
if lb.dependent:
|
||||
used_inc_dirs.extend(items)
|
||||
else:
|
||||
inc_dirs.extend(items)
|
||||
LibBuilderBase.INC_DIRS_CACHE = used_inc_dirs + inc_dirs
|
||||
if not LibBuilderBase._INCLUDE_DIRS_CACHE:
|
||||
LibBuilderBase._INCLUDE_DIRS_CACHE = []
|
||||
for lb in self.env.GetLibBuilders():
|
||||
LibBuilderBase._INCLUDE_DIRS_CACHE.extend(
|
||||
[self.env.Dir(d) for d in lb.get_include_dirs()])
|
||||
|
||||
# append self include directories
|
||||
inc_dirs = [self.env.Dir(d) for d in self.get_inc_dirs()]
|
||||
inc_dirs.extend(LibBuilderBase.INC_DIRS_CACHE)
|
||||
include_dirs = [self.env.Dir(d) for d in self.get_include_dirs()]
|
||||
include_dirs.extend(LibBuilderBase._INCLUDE_DIRS_CACHE)
|
||||
|
||||
result = []
|
||||
for path in self._validate_search_paths(search_paths):
|
||||
for path in self._validate_search_files(search_files):
|
||||
try:
|
||||
assert "+" in self.lib_ldf_mode
|
||||
incs = self.env.File(path).get_found_includes(
|
||||
self.env, LibBuilderBase.ADVANCED_SCANNER, tuple(inc_dirs))
|
||||
incs = LibBuilderBase.CCONDITIONAL_SCANNER(
|
||||
self.env.File(path),
|
||||
self.env,
|
||||
tuple(include_dirs),
|
||||
depth=self.CCONDITIONAL_SCANNER_DEPTH)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
if self.verbose and "+" in self.lib_ldf_mode:
|
||||
sys.stderr.write(
|
||||
"Warning! Classic Pre Processor is used for `%s`, "
|
||||
"advanced has failed with `%s`\n" % (path, e))
|
||||
incs = self.env.File(path).get_found_includes(
|
||||
self.env, LibBuilderBase.CLASSIC_SCANNER, tuple(inc_dirs))
|
||||
_incs = LibBuilderBase.CLASSIC_SCANNER(
|
||||
self.env.File(path), self.env, tuple(include_dirs))
|
||||
incs = []
|
||||
for inc in _incs:
|
||||
incs.append(inc)
|
||||
if not self.PARSE_SRC_BY_H_NAME:
|
||||
continue
|
||||
_h_path = inc.get_abspath()
|
||||
if not self.env.IsFileWithExt(_h_path,
|
||||
piotool.SRC_HEADER_EXT):
|
||||
continue
|
||||
_f_part = _h_path[:_h_path.rindex(".")]
|
||||
for ext in piotool.SRC_C_EXT:
|
||||
if isfile("%s.%s" % (_f_part, ext)):
|
||||
incs.append(
|
||||
self.env.File("%s.%s" % (_f_part, ext)))
|
||||
# print path, map(lambda n: n.get_abspath(), incs)
|
||||
for inc in incs:
|
||||
if inc not in result:
|
||||
result.append(inc)
|
||||
return result
|
||||
|
||||
def depend_recursive(self, lb, search_paths=None):
|
||||
def depend_recursive(self, lb, search_files=None):
|
||||
|
||||
def _already_depends(_lb):
|
||||
if self in _lb.depbuilders:
|
||||
@@ -332,38 +393,38 @@ class LibBuilderBase(object):
|
||||
if self != lb:
|
||||
if _already_depends(lb):
|
||||
if self.verbose:
|
||||
sys.stderr.write("Warning! Circular dependencies detected "
|
||||
"between `%s` and `%s`\n" %
|
||||
(self.path, lb.path))
|
||||
sys.stderr.write(
|
||||
"Warning! Circular dependencies detected "
|
||||
"between `%s` and `%s`\n" % (self.path, lb.path))
|
||||
self._circular_deps.append(lb)
|
||||
elif lb not in self._depbuilders:
|
||||
self._depbuilders.append(lb)
|
||||
LibBuilderBase.INC_DIRS_CACHE = None
|
||||
lb.search_deps_recursive(search_paths)
|
||||
LibBuilderBase._INCLUDE_DIRS_CACHE = None
|
||||
lb.search_deps_recursive(search_files)
|
||||
|
||||
def search_deps_recursive(self, search_paths=None):
|
||||
def search_deps_recursive(self, search_files=None):
|
||||
if not self._is_dependent:
|
||||
self._is_dependent = True
|
||||
self._process_dependencies()
|
||||
self.process_dependencies()
|
||||
|
||||
if self.lib_ldf_mode.startswith("deep"):
|
||||
search_paths = self.get_src_files()
|
||||
search_files = self.get_search_files()
|
||||
|
||||
# when LDF is disabled
|
||||
if self.lib_ldf_mode == "off":
|
||||
return
|
||||
|
||||
lib_inc_map = {}
|
||||
for inc in self._get_found_includes(search_paths):
|
||||
for lb in self.envorigin.GetLibBuilders():
|
||||
for inc in self._get_found_includes(search_files):
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if inc.get_abspath() in lb:
|
||||
if lb not in lib_inc_map:
|
||||
lib_inc_map[lb] = []
|
||||
lib_inc_map[lb].append(inc.get_abspath())
|
||||
break
|
||||
|
||||
for lb, lb_search_paths in lib_inc_map.items():
|
||||
self.depend_recursive(lb, lb_search_paths)
|
||||
for lb, lb_search_files in lib_inc_map.items():
|
||||
self.depend_recursive(lb, lb_search_files)
|
||||
|
||||
def build(self):
|
||||
libs = []
|
||||
@@ -374,26 +435,28 @@ class LibBuilderBase(object):
|
||||
self.env.AppendUnique(**{key: lb.env.get(key)})
|
||||
|
||||
for lb in self._circular_deps:
|
||||
self.env.AppendUnique(CPPPATH=lb.get_inc_dirs())
|
||||
self.env.AppendUnique(CPPPATH=lb.get_include_dirs())
|
||||
|
||||
if not self._is_built:
|
||||
self.env.AppendUnique(CPPPATH=self.get_inc_dirs())
|
||||
if self._is_built:
|
||||
return libs
|
||||
self._is_built = True
|
||||
|
||||
if self.lib_ldf_mode == "off":
|
||||
for lb in self.envorigin.GetLibBuilders():
|
||||
if self == lb or not lb.is_built:
|
||||
continue
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.AppendUnique(**{key: lb.env.get(key)})
|
||||
self.env.AppendUnique(CPPPATH=self.get_include_dirs())
|
||||
|
||||
if self.lib_archive:
|
||||
libs.append(
|
||||
self.env.BuildLibrary(self.build_dir, self.src_dir,
|
||||
self.src_filter))
|
||||
else:
|
||||
self.env.BuildSources(self.build_dir, self.src_dir,
|
||||
self.src_filter)
|
||||
self._is_built = True
|
||||
if self.lib_ldf_mode == "off":
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if self == lb or not lb.is_built:
|
||||
continue
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.AppendUnique(**{key: lb.env.get(key)})
|
||||
|
||||
if self.lib_archive:
|
||||
libs.append(
|
||||
self.env.BuildLibrary(self.build_dir, self.src_dir,
|
||||
self.src_filter))
|
||||
else:
|
||||
self.env.BuildSources(self.build_dir, self.src_dir,
|
||||
self.src_filter)
|
||||
return libs
|
||||
|
||||
|
||||
@@ -401,41 +464,6 @@ class UnknownLibBuilder(LibBuilderBase):
|
||||
pass
|
||||
|
||||
|
||||
class ProjectAsLibBuilder(LibBuilderBase):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
LibBuilderBase.__init__(self, *args, **kwargs)
|
||||
self._is_built = True
|
||||
|
||||
@property
|
||||
def lib_ldf_mode(self):
|
||||
mode = LibBuilderBase.lib_ldf_mode.fget(self)
|
||||
if not mode.startswith("chain"):
|
||||
return mode
|
||||
# parse all project files
|
||||
return "deep+" if "+" in mode else "deep"
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
return self.env.get("SRC_FILTER", LibBuilderBase.src_filter.fget(self))
|
||||
|
||||
def process_extra_options(self):
|
||||
# skip for project, options are already processed
|
||||
pass
|
||||
|
||||
def search_deps_recursive(self, search_paths=None):
|
||||
for dep in self.env.get("LIB_DEPS", []):
|
||||
for token in ("@", "="):
|
||||
if token in dep:
|
||||
dep, _ = dep.split(token, 1)
|
||||
for lb in self.envorigin.GetLibBuilders():
|
||||
if lb.name == dep:
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
break
|
||||
return LibBuilderBase.search_deps_recursive(self, search_paths)
|
||||
|
||||
|
||||
class ArduinoLibBuilder(LibBuilderBase):
|
||||
|
||||
def load_manifest(self):
|
||||
@@ -450,13 +478,13 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
manifest[key.strip()] = value.strip()
|
||||
return manifest
|
||||
|
||||
def get_inc_dirs(self):
|
||||
inc_dirs = LibBuilderBase.get_inc_dirs(self)
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
if isdir(join(self.path, "src")):
|
||||
return inc_dirs
|
||||
return include_dirs
|
||||
if isdir(join(self.path, "utility")):
|
||||
inc_dirs.append(join(self.path, "utility"))
|
||||
return inc_dirs
|
||||
include_dirs.append(join(self.path, "utility"))
|
||||
return include_dirs
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
@@ -471,7 +499,29 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
return src_filter
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return self.items_in_list(frameworks, ["arduino", "energia"])
|
||||
return util.items_in_list(frameworks, ["arduino", "energia"])
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
platforms_map = {
|
||||
"avr": "atmelavr",
|
||||
"sam": "atmelsam",
|
||||
"samd": "atmelsam",
|
||||
"esp8266": "espressif8266",
|
||||
"esp32": "espressif32",
|
||||
"arc32": "intel_arc32",
|
||||
"stm32": "ststm32"
|
||||
}
|
||||
items = []
|
||||
for arch in self._manifest.get("architectures", "").split(","):
|
||||
arch = arch.strip()
|
||||
if arch == "*":
|
||||
items = "*"
|
||||
break
|
||||
if arch in platforms_map:
|
||||
items.append(platforms_map[arch])
|
||||
if not items:
|
||||
return LibBuilderBase.is_platforms_compatible(self, platforms)
|
||||
return util.items_in_list(platforms, items)
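The mapping above translates the architectures field of an Arduino library.properties manifest into PlatformIO platform names before the compatibility check; a condensed standalone sketch (the map here is a subset, and the names are illustrative):

PLATFORMS_MAP = {"avr": "atmelavr", "esp8266": "espressif8266", "esp32": "espressif32"}

def architectures_to_platforms(architectures):
    # "avr, esp8266" -> ["atmelavr", "espressif8266"]; "*" matches everything
    items = []
    for arch in architectures.split(","):
        arch = arch.strip()
        if arch == "*":
            return "*"
        if arch in PLATFORMS_MAP:
            items.append(PLATFORMS_MAP[arch])
    return items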
|
||||
|
||||
|
||||
class MbedLibBuilder(LibBuilderBase):
|
||||
@@ -481,22 +531,40 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
return {}
|
||||
return util.load_json(join(self.path, "module.json"))
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if isdir(join(self.path, "include")):
|
||||
return join(self.path, "include")
|
||||
return None
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if isdir(join(self.path, "source")):
|
||||
return join(self.path, "source")
|
||||
return LibBuilderBase.src_dir.fget(self)
|
||||
|
||||
def get_inc_dirs(self):
|
||||
inc_dirs = LibBuilderBase.get_inc_dirs(self)
|
||||
if self.path not in inc_dirs:
|
||||
inc_dirs.append(self.path)
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
if self.path not in include_dirs:
|
||||
include_dirs.append(self.path)
|
||||
|
||||
# library with module.json
|
||||
for p in self._manifest.get("extraIncludes", []):
|
||||
inc_dirs.append(join(self.path, p))
|
||||
return inc_dirs
|
||||
include_dirs.append(join(self.path, p))
|
||||
|
||||
# old mbed library without manifest, add to CPPPATH all folders
|
||||
if not self._manifest:
|
||||
for root, _, __ in os.walk(self.path):
|
||||
part = root.replace(self.path, "").lower()
|
||||
if any(s in part for s in ("%s." % sep, "test", "example")):
|
||||
continue
|
||||
if root not in include_dirs:
|
||||
include_dirs.append(root)
|
||||
|
||||
return include_dirs
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return self.items_in_list(frameworks, ["mbed"])
|
||||
return util.items_in_list(frameworks, ["mbed"])
|
||||
|
||||
|
||||
class PlatformIOLibBuilder(LibBuilderBase):
|
||||
@@ -505,6 +573,14 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
assert isfile(join(self.path, "library.json"))
|
||||
manifest = util.load_json(join(self.path, "library.json"))
|
||||
assert "name" in manifest
|
||||
|
||||
# replace "espressif" old name dev/platform with ESP8266
|
||||
if "platforms" in manifest:
|
||||
manifest['platforms'] = [
|
||||
"espressif8266" if p == "espressif" else p
|
||||
for p in util.items_to_list(manifest['platforms'])
|
||||
]
|
||||
|
||||
return manifest
|
||||
|
||||
def _is_arduino_manifest(self):
|
||||
@@ -514,6 +590,8 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
def src_filter(self):
|
||||
if "srcFilter" in self._manifest.get("build", {}):
|
||||
return self._manifest.get("build").get("srcFilter")
|
||||
elif self.env['SRC_FILTER']:
|
||||
return self.env['SRC_FILTER']
|
||||
elif self._is_arduino_manifest():
|
||||
return ArduinoLibBuilder.src_filter.fget(self)
|
||||
return LibBuilderBase.src_filter.fget(self)
|
||||
@@ -549,59 +627,154 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
self._manifest.get("build").get("libLDFMode"))
|
||||
return LibBuilderBase.lib_ldf_mode.fget(self)
|
||||
|
||||
@property
|
||||
def lib_compat_mode(self):
|
||||
if "libCompatMode" in self._manifest.get("build", {}):
|
||||
return self.validate_compat_mode(
|
||||
self._manifest.get("build").get("libCompatMode"))
|
||||
return LibBuilderBase.lib_compat_mode.fget(self)
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
items = self._manifest.get("platforms")
|
||||
if not items:
|
||||
return LibBuilderBase.is_platforms_compatible(self, platforms)
|
||||
return self.items_in_list(platforms, items)
|
||||
return util.items_in_list(platforms, items)
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
items = self._manifest.get("frameworks")
|
||||
if not items:
|
||||
return LibBuilderBase.is_frameworks_compatible(self, frameworks)
|
||||
return self.items_in_list(frameworks, items)
|
||||
return util.items_in_list(frameworks, items)
|
||||
|
||||
def get_inc_dirs(self):
|
||||
inc_dirs = LibBuilderBase.get_inc_dirs(self)
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
|
||||
# backwards compatibility with PlatformIO 2.0
|
||||
if ("build" not in self._manifest and self._is_arduino_manifest() and
|
||||
not isdir(join(self.path, "src")) and
|
||||
isdir(join(self.path, "utility"))):
|
||||
inc_dirs.append(join(self.path, "utility"))
|
||||
# backwards compatibility with PlatformIO 2.0
|
||||
if ("build" not in self._manifest and self._is_arduino_manifest()
|
||||
and not isdir(join(self.path, "src"))
|
||||
and isdir(join(self.path, "utility"))):
|
||||
include_dirs.append(join(self.path, "utility"))
|
||||
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in self.envorigin['CPPPATH']:
|
||||
inc_dirs.append(self.env.subst(path))
|
||||
return inc_dirs
|
||||
if path not in self.envorigin.get("CPPPATH", []):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
return include_dirs
|
||||
|
||||
|
||||
class ProjectAsLibBuilder(LibBuilderBase):
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
|
||||
return include_dir if isdir(include_dir) else None
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
return self.env.subst("$PROJECTSRC_DIR")
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
project_include_dir = self.env.subst("$PROJECTINCLUDE_DIR")
|
||||
if isdir(project_include_dir):
|
||||
include_dirs.append(project_include_dir)
|
||||
return include_dirs
|
||||
|
||||
def get_search_files(self):
|
||||
# project files
|
||||
items = LibBuilderBase.get_search_files(self)
|
||||
# test files
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
items.extend([
|
||||
join("$PROJECTTEST_DIR",
|
||||
item) for item in self.env.MatchSourceFiles(
|
||||
"$PROJECTTEST_DIR", "$PIOTEST_SRC_FILTER")
|
||||
])
|
||||
return items
|
||||
|
||||
@property
|
||||
def lib_ldf_mode(self):
|
||||
mode = LibBuilderBase.lib_ldf_mode.fget(self)
|
||||
if not mode.startswith("chain"):
|
||||
return mode
|
||||
# parse all project files
|
||||
return "deep+" if "+" in mode else "deep"
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
return self.env.get("SRC_FILTER", LibBuilderBase.src_filter.fget(self))
|
||||
|
||||
def process_extra_options(self):
|
||||
# skip for project, options are already processed
|
||||
pass
|
||||
|
||||
def process_dependencies(self): # pylint: disable=too-many-branches
|
||||
uris = self.env.get("LIB_DEPS", [])
|
||||
if not uris:
|
||||
return
|
||||
storage_dirs = []
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if dirname(lb.path) not in storage_dirs:
|
||||
storage_dirs.append(dirname(lb.path))
|
||||
|
||||
for uri in uris:
|
||||
found = False
|
||||
for storage_dir in storage_dirs:
|
||||
if found:
|
||||
break
|
||||
lm = LibraryManager(storage_dir)
|
||||
pkg_dir = lm.get_package_dir(*lm.parse_pkg_uri(uri))
|
||||
if not pkg_dir:
|
||||
continue
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if lb.path != pkg_dir:
|
||||
continue
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
found = True
|
||||
break
|
||||
|
||||
if not found:
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if lb.name != uri:
|
||||
continue
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
break
|
||||
|
||||
def build(self):
|
||||
self._is_built = True # do not build Project now
|
||||
self.env.AppendUnique(CPPPATH=self.get_include_dirs())
|
||||
return LibBuilderBase.build(self)
|
||||
|
||||
|
||||
def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
|
||||
if "__PIO_LIB_BUILDERS" in DefaultEnvironment():
|
||||
return DefaultEnvironment()['__PIO_LIB_BUILDERS']
|
||||
return sorted(
|
||||
DefaultEnvironment()['__PIO_LIB_BUILDERS'],
|
||||
key=lambda lb: 0 if lb.dependent else 1)
|
||||
|
||||
items = []
|
||||
compat_mode = int(env.get("LIB_COMPAT_MODE", 1))
|
||||
verbose = (int(ARGUMENTS.get("PIOVERBOSE", 0)) and
|
||||
not env.GetOption('clean'))
|
||||
verbose = int(ARGUMENTS.get("PIOVERBOSE",
|
||||
0)) and not env.GetOption('clean')
|
||||
|
||||
def _check_lib_builder(lb):
|
||||
compat_mode = lb.lib_compat_mode
|
||||
if lb.name in env.get("LIB_IGNORE", []):
|
||||
if verbose:
|
||||
sys.stderr.write("Ignored library %s\n" % lb.path)
|
||||
return
|
||||
if compat_mode > 1 and not lb.is_platforms_compatible(env[
|
||||
'PIOPLATFORM']):
|
||||
return None
|
||||
if compat_mode == "strict" and not lb.is_platforms_compatible(
|
||||
env['PIOPLATFORM']):
|
||||
if verbose:
|
||||
sys.stderr.write("Platform incompatible library %s\n" %
|
||||
lb.path)
|
||||
sys.stderr.write(
|
||||
"Platform incompatible library %s\n" % lb.path)
|
||||
return False
|
||||
if compat_mode > 0 and "PIOFRAMEWORK" in env and \
|
||||
if compat_mode == "soft" and "PIOFRAMEWORK" in env and \
|
||||
not lb.is_frameworks_compatible(env.get("PIOFRAMEWORK", [])):
|
||||
if verbose:
|
||||
sys.stderr.write("Framework incompatible library %s\n" %
|
||||
lb.path)
|
||||
sys.stderr.write(
|
||||
"Framework incompatible library %s\n" % lb.path)
|
||||
return False
|
||||
return True
|
||||
|
||||
@@ -614,10 +787,9 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
if item == "__cores__" or not isdir(join(libs_dir, item)):
|
||||
continue
|
||||
try:
|
||||
lb = LibBuilderFactory.new(env,
|
||||
join(libs_dir, item),
|
||||
verbose=verbose)
|
||||
except ValueError:
|
||||
lb = LibBuilderFactory.new(
|
||||
env, join(libs_dir, item), verbose=verbose)
|
||||
except exception.InvalidJSONFile:
|
||||
if verbose:
|
||||
sys.stderr.write("Skip library with broken manifest: %s\n"
|
||||
% join(libs_dir, item))
|
||||
@@ -643,15 +815,14 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
return items
|
||||
|
||||
|
||||
def BuildDependentLibraries(env, src_dir):
|
||||
lib_builders = env.GetLibBuilders()
|
||||
def ConfigureProjectLibBuilder(env):
|
||||
|
||||
def correct_found_libs():
|
||||
def correct_found_libs(lib_builders):
|
||||
# build full dependency graph
|
||||
found_lbs = [lb for lb in lib_builders if lb.dependent]
|
||||
for lb in lib_builders:
|
||||
if lb in found_lbs:
|
||||
lb.search_deps_recursive(lb.get_src_files())
|
||||
lb.search_deps_recursive(lb.get_search_files())
|
||||
for lb in lib_builders:
|
||||
for deplb in lb.depbuilders[:]:
|
||||
if deplb not in found_lbs:
|
||||
@@ -661,33 +832,45 @@ def BuildDependentLibraries(env, src_dir):
|
||||
margin = "| " * (level)
|
||||
for lb in root.depbuilders:
|
||||
title = "<%s>" % lb.name
|
||||
vcs_info = lb.vcs_info
|
||||
if lb.version:
|
||||
title += " v%s" % lb.version
|
||||
title += " %s" % lb.version
|
||||
if vcs_info and vcs_info.get("version"):
|
||||
title += " #%s" % vcs_info.get("version")
|
||||
sys.stdout.write("%s|-- %s" % (margin, title))
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
title += " (%s)" % lb.path
|
||||
print "%s|-- %s" % (margin, title)
|
||||
if vcs_info:
|
||||
sys.stdout.write(" [%s]" % vcs_info.get("url"))
|
||||
sys.stdout.write(" (")
|
||||
sys.stdout.write(lb.path)
|
||||
sys.stdout.write(")")
|
||||
sys.stdout.write("\n")
|
||||
if lb.depbuilders:
|
||||
print_deps_tree(lb, level + 1)
|
||||
|
||||
print "Collected %d compatible libraries" % len(lib_builders)
|
||||
print "Looking for dependencies..."
|
||||
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
|
||||
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project)
|
||||
|
||||
project = ProjectAsLibBuilder(env, src_dir)
|
||||
project.env = env
|
||||
print "Library Dependency Finder -> http://bit.ly/configure-pio-ldf"
|
||||
print "LDF MODES: FINDER(%s) COMPATIBILITY(%s)" % (ldf_mode,
|
||||
project.lib_compat_mode)
|
||||
|
||||
lib_builders = env.GetLibBuilders()
|
||||
print "Collected %d compatible libraries" % len(lib_builders)
|
||||
|
||||
print "Scanning dependencies..."
|
||||
project.search_deps_recursive()
|
||||
|
||||
if (LibBuilderBase.validate_ldf_mode(
|
||||
env.get("LIB_LDF_MODE", LibBuilderBase.LDF_MODE_DEFAULT))
|
||||
.startswith("chain") and project.depbuilders):
|
||||
correct_found_libs()
|
||||
if ldf_mode.startswith("chain") and project.depbuilders:
|
||||
correct_found_libs(lib_builders)
|
||||
|
||||
if project.depbuilders:
|
||||
print "Library Dependency Graph"
|
||||
print "Dependency Graph"
|
||||
print_deps_tree(project)
|
||||
else:
|
||||
print "Project does not have dependencies"
|
||||
print "No dependencies"
|
||||
|
||||
return project.build()
|
||||
return project
|
||||
|
||||
|
||||
def exists(_):
|
||||
@@ -696,5 +879,5 @@ def exists(_):
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetLibBuilders)
|
||||
env.AddMethod(BuildDependentLibraries)
|
||||
env.AddMethod(ConfigureProjectLibBuilder)
|
||||
return env
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -17,22 +17,23 @@ from __future__ import absolute_import
|
||||
import atexit
|
||||
import re
|
||||
import sys
|
||||
from glob import glob
|
||||
from os import environ, remove, walk
|
||||
from os.path import basename, isdir, isfile, join, relpath
|
||||
from os.path import basename, isdir, isfile, join, realpath, relpath, sep
|
||||
from tempfile import mkstemp
|
||||
|
||||
from SCons.Action import Action
|
||||
from SCons.Defaults import processDefines
|
||||
from SCons.Script import ARGUMENTS
|
||||
|
||||
from platformio import util
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
|
||||
|
||||
class InoToCPPConverter(object):
|
||||
|
||||
PROTOTYPE_RE = re.compile(r"""^(
|
||||
([a-z_\d]+\*?\s+){1,2} # return type
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
([a-z_\d\&]+\*?\s+){1,2} # return type
|
||||
([a-z_\d]+\s*) # name of prototype
|
||||
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
|
||||
)\s*\{ # must end with {
|
||||
@@ -50,7 +51,7 @@ class InoToCPPConverter(object):
|
||||
def convert(self, nodes):
|
||||
contents = self.merge(nodes)
|
||||
if not contents:
|
||||
return
|
||||
return None
|
||||
return self.process(contents)
|
||||
|
||||
def merge(self, nodes):
|
||||
@@ -89,8 +90,8 @@ class InoToCPPConverter(object):
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
|
||||
out_file, tmp_path), "Converting " + basename(
|
||||
out_file[:-4])))
|
||||
out_file, tmp_path),
|
||||
"Converting " + basename(out_file[:-4])))
|
||||
atexit.register(_delete_file, tmp_path)
|
||||
return isfile(out_file)
|
||||
|
||||
@@ -115,7 +116,7 @@ class InoToCPPConverter(object):
|
||||
elif stropen:
|
||||
newlines[len(newlines) - 1] += line[:-1]
|
||||
continue
|
||||
elif stropen and line.endswith('";'):
|
||||
elif stropen and line.endswith(('",', '";')):
|
||||
newlines[len(newlines) - 1] += line
|
||||
stropen = False
|
||||
newlines.append('#line %d "%s"' %
|
||||
@@ -139,8 +140,8 @@ class InoToCPPConverter(object):
|
||||
prototypes = []
|
||||
reserved_keywords = set(["if", "else", "while"])
|
||||
for match in self.PROTOTYPE_RE.finditer(contents):
|
||||
if (set([match.group(2).strip(), match.group(3).strip()]) &
|
||||
reserved_keywords):
|
||||
if (set([match.group(2).strip(),
|
||||
match.group(3).strip()]) & reserved_keywords):
|
||||
continue
|
||||
prototypes.append(match)
|
||||
return prototypes
|
||||
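The prototype scan above is what lets a bare .ino sketch compile as C++: function definitions are located with PROTOTYPE_RE, filtered against reserved keywords, and later emitted as forward declarations ahead of the code. A rough standalone sketch of the idea, using a deliberately simplified regex (not the exact PROTOTYPE_RE from the diff):

import re

# Simplified stand-in for PROTOTYPE_RE: match "ret name(args) {" at line start.
PROTO_RE = re.compile(r"^([A-Za-z_][\w\s\*&]*?[A-Za-z_]\w*\s*\([^)]*\))\s*\{", re.M)
RESERVED = {"if", "else", "while", "for", "switch"}

SKETCH = """void setup() {
    pinMode(13, OUTPUT);
}

void loop() {
    blink(500);
}

void blink(int ms) {
    digitalWrite(13, HIGH);
}
"""

prototypes = []
for match in PROTO_RE.finditer(SKETCH):
    signature = match.group(1).strip()
    name = signature.split("(")[0].split()[-1]
    if name in RESERVED:  # same trick as the reserved_keywords check above
        continue
    prototypes.append(signature + ";")

print("\n".join(prototypes))
# void setup();
# void loop();
# void blink(int ms);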
@@ -163,25 +164,25 @@ class InoToCPPConverter(object):
|
||||
|
||||
prototype_names = set([m.group(3).strip() for m in prototypes])
|
||||
split_pos = prototypes[0].start()
|
||||
match_ptrs = re.search(self.PROTOPTRS_TPLRE %
|
||||
("|".join(prototype_names)),
|
||||
contents[:split_pos], re.M)
|
||||
match_ptrs = re.search(
|
||||
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
|
||||
contents[:split_pos], re.M)
|
||||
if match_ptrs:
|
||||
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
|
||||
|
||||
result = []
|
||||
result.append(contents[:split_pos].strip())
|
||||
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
|
||||
result.append('#line %d "%s"' %
|
||||
(self._get_total_lines(contents[:split_pos]),
|
||||
self._main_ino.replace("\\", "/")))
|
||||
result.append('#line %d "%s"' % (self._get_total_lines(
|
||||
contents[:split_pos]), self._main_ino.replace("\\", "/")))
|
||||
result.append(contents[split_pos:].strip())
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def ConvertInoToCpp(env):
|
||||
ino_nodes = (env.Glob(join("$PROJECTSRC_DIR", "*.ino")) +
|
||||
env.Glob(join("$PROJECTSRC_DIR", "*.pde")))
|
||||
src_dir = util.glob_escape(env.subst("$PROJECTSRC_DIR"))
|
||||
ino_nodes = (
|
||||
env.Glob(join(src_dir, "*.ino")) + env.Glob(join(src_dir, "*.pde")))
|
||||
if not ino_nodes:
|
||||
return
|
||||
c = InoToCPPConverter(env)
|
||||
@@ -198,82 +199,8 @@ def _delete_file(path):
|
||||
pass
|
||||
|
||||
|
||||
def DumpIDEData(env):
|
||||
|
||||
def get_includes(env_):
|
||||
includes = []
|
||||
|
||||
for item in env_.get("CPPPATH", []):
|
||||
includes.append(env_.subst(item))
|
||||
|
||||
# installed libs
|
||||
for lb in env.GetLibBuilders():
|
||||
includes.extend(lb.get_inc_dirs())
|
||||
|
||||
# includes from toolchains
|
||||
p = env.PioPlatform()
|
||||
for name in p.get_installed_packages():
|
||||
if p.get_package_type(name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = p.get_package_dir(name)
|
||||
toolchain_incglobs = [
|
||||
join(toolchain_dir, "*", "include*"),
|
||||
join(toolchain_dir, "lib", "gcc", "*", "*", "include*")
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
includes.extend(glob(g))
|
||||
|
||||
return includes
|
||||
|
||||
def get_defines(env_):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in processDefines(env_.get("CPPDEFINES", [])):
|
||||
defines.append(env_.subst(item).replace('\\', ''))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env['PIOPLATFORM'] == "atmelavr":
|
||||
defines.append(
|
||||
"__AVR_%s__" % env.BoardConfig().get("build.mcu").upper()
|
||||
.replace("ATMEGA", "ATmega").replace("ATTINY", "ATtiny"))
|
||||
return defines
|
||||
|
||||
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
|
||||
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS"
|
||||
env_ = env.Clone()
|
||||
|
||||
data = {
|
||||
"libsource_dirs":
|
||||
[env_.subst(l) for l in env_.get("LIBSOURCE_DIRS", [])],
|
||||
"defines": get_defines(env_),
|
||||
"includes": get_includes(env_),
|
||||
"cc_flags": env_.subst(LINTCCOM),
|
||||
"cxx_flags": env_.subst(LINTCXXCOM),
|
||||
"cc_path": util.where_is_program(
|
||||
env_.subst("$CC"), env_.subst("${ENV['PATH']}")),
|
||||
"cxx_path": util.where_is_program(
|
||||
env_.subst("$CXX"), env_.subst("${ENV['PATH']}"))
|
||||
}
|
||||
|
||||
# https://github.com/platformio/platformio-atom-ide/issues/34
|
||||
_new_defines = []
|
||||
for item in processDefines(env_.get("CPPDEFINES", [])):
|
||||
item = item.replace('\\"', '"')
|
||||
if " " in item:
|
||||
_new_defines.append(item.replace(" ", "\\\\ "))
|
||||
else:
|
||||
_new_defines.append(item)
|
||||
env_.Replace(CPPDEFINES=_new_defines)
|
||||
|
||||
data.update({
|
||||
"cc_flags": env_.subst(LINTCCOM),
|
||||
"cxx_flags": env_.subst(LINTCXXCOM)
|
||||
})
|
||||
|
||||
return data
|
||||
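Besides includes and flags, get_defines() above synthesizes the AVR-specific __AVR_<MCU>__ macro from the board's build.mcu value, normalizing the vendor casing. The string transformation is easy to verify on its own (the MCU name below is just an example):

# Reproduces the MCU-name massaging from get_defines() above.
mcu = "atmega328p"  # example "build.mcu" value from a board manifest
symbol = "__AVR_%s__" % mcu.upper().replace("ATMEGA", "ATmega").replace(
    "ATTINY", "ATtiny")
print(symbol)  # __AVR_ATmega328P__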
|
||||
|
||||
def GetCompilerType(env):
|
||||
@util.memoized()
|
||||
def _get_compiler_type(env):
|
||||
try:
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
@@ -290,6 +217,10 @@ def GetCompilerType(env):
|
||||
return None
|
||||
|
||||
|
||||
def GetCompilerType(env):
|
||||
return _get_compiler_type(env)
|
||||
|
||||
|
||||
def GetActualLDScript(env):
|
||||
|
||||
def _lookup_in_ldpath(script):
|
||||
@@ -300,14 +231,25 @@ def GetActualLDScript(env):
|
||||
return None
|
||||
|
||||
script = None
|
||||
script_in_next = False
|
||||
for f in env.get("LINKFLAGS", []):
|
||||
if f.startswith("-Wl,-T"):
|
||||
script = env.subst(f[6:].replace('"', "").strip())
|
||||
if isfile(script):
|
||||
return script
|
||||
path = _lookup_in_ldpath(script)
|
||||
if path:
|
||||
return path
|
||||
raw_script = None
|
||||
if f == "-T":
|
||||
script_in_next = True
|
||||
continue
|
||||
elif script_in_next:
|
||||
script_in_next = False
|
||||
raw_script = f
|
||||
elif f.startswith("-Wl,-T"):
|
||||
raw_script = f[6:]
|
||||
else:
|
||||
continue
|
||||
script = env.subst(raw_script.replace('"', "").strip())
|
||||
if isfile(script):
|
||||
return script
|
||||
path = _lookup_in_ldpath(script)
|
||||
if path:
|
||||
return path
|
||||
|
||||
if script:
|
||||
sys.stderr.write(
|
||||
@@ -327,8 +269,7 @@ def GetActualLDScript(env):
|
||||
def VerboseAction(_, act, actstr):
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
return act
|
||||
else:
|
||||
return Action(act, actstr)
|
||||
return Action(act, actstr)
|
||||
|
||||
|
||||
def PioClean(env, clean_dir):
|
||||
@@ -344,15 +285,53 @@ def PioClean(env, clean_dir):
|
||||
env.Exit(0)
|
||||
|
||||
|
||||
def ProcessDebug(env):
|
||||
if not env.subst("$PIODEBUGFLAGS"):
|
||||
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"])
|
||||
env.Append(PIODEBUGFLAGS=["-D__PLATFORMIO_DEBUG__"])
|
||||
env.Append(
|
||||
BUILD_FLAGS=env.get("PIODEBUGFLAGS", []),
|
||||
BUILD_UNFLAGS=["-Os", "-O0", "-O1", "-O2", "-O3"])
|
||||
|
||||
|
||||
def ProcessTest(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
|
||||
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")])
|
||||
unitylib = env.BuildLibrary(
|
||||
join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity"))
|
||||
env.Prepend(LIBS=[unitylib])
|
||||
|
||||
src_filter = ["+<*.cpp>", "+<*.c>"]
|
||||
if "PIOTEST" in env:
|
||||
src_filter.append("+<%s%s>" % (env['PIOTEST'], sep))
|
||||
env.Replace(PIOTEST_SRC_FILTER=src_filter)
|
||||
|
||||
|
||||
def GetExtraScripts(env, scope):
|
||||
items = []
|
||||
for item in env.get("EXTRA_SCRIPTS", []):
|
||||
if scope == "post" and ":" not in item:
|
||||
items.append(item)
|
||||
elif item.startswith("%s:" % scope):
|
||||
items.append(item[len(scope) + 1:])
|
||||
if not items:
|
||||
return items
|
||||
with util.cd(env.subst("$PROJECT_DIR")):
|
||||
return [realpath(item) for item in items]
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(ConvertInoToCpp)
|
||||
env.AddMethod(DumpIDEData)
|
||||
env.AddMethod(GetCompilerType)
|
||||
env.AddMethod(GetActualLDScript)
|
||||
env.AddMethod(VerboseAction)
|
||||
env.AddMethod(PioClean)
|
||||
env.AddMethod(ProcessDebug)
|
||||
env.AddMethod(ProcessTest)
|
||||
env.AddMethod(GetExtraScripts)
|
||||
return env
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import base64
|
||||
import sys
|
||||
from os.path import isdir, isfile, join
|
||||
|
||||
@@ -22,8 +23,10 @@ from SCons.Script import COMMAND_LINE_TARGETS
|
||||
from platformio import exception, util
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
|
||||
# pylint: disable=too-many-branches
|
||||
|
||||
@util.memoized
|
||||
|
||||
@util.memoized()
|
||||
def initPioPlatform(name):
|
||||
return PlatformFactory.newPlatform(name)
|
||||
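Note that the decorator changes from @util.memoized to @util.memoized(), i.e. util.memoized now behaves as a decorator factory that has to be called. A generic sketch of that shape (not PlatformIO's implementation, only an illustration of why the parentheses are now required):

import functools


def memoized():
    # Generic decorator factory (illustrative only; util.memoized's real
    # implementation may differ): memoized() returns the actual decorator.
    def decorator(func):
        cache = {}

        @functools.wraps(func)
        def wrapper(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

        return wrapper

    return decorator


@memoized()
def init_platform(name):
    print("loading %s" % name)  # only printed on the first call per name
    return name

init_platform("atmelavr")
init_platform("atmelavr")  # served from the cache, no second "loading" line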
|
||||
@@ -41,8 +44,10 @@ def PioPlatform(env):
|
||||
def BoardConfig(env, board=None):
|
||||
p = initPioPlatform(env['PLATFORM_MANIFEST'])
|
||||
try:
|
||||
config = p.board_config(board if board else env['BOARD'])
|
||||
except exception.UnknownBoard as e:
|
||||
board = board or env.get("BOARD")
|
||||
assert board, "BoardConfig: Board is not defined"
|
||||
config = p.board_config(board)
|
||||
except (AssertionError, exception.UnknownBoard) as e:
|
||||
sys.stderr.write("Error: %s\n" % str(e))
|
||||
env.Exit(1)
|
||||
return config
|
||||
@@ -61,38 +66,111 @@ def LoadPioPlatform(env, variables):
|
||||
p = env.PioPlatform()
|
||||
installed_packages = p.get_installed_packages()
|
||||
|
||||
# Add toolchains and uploaders to $PATH
|
||||
# Ensure real platform name
|
||||
env['PIOPLATFORM'] = p.name
|
||||
|
||||
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
|
||||
systype = util.get_systype()
|
||||
for name in installed_packages:
|
||||
type_ = p.get_package_type(name)
|
||||
if type_ not in ("toolchain", "uploader"):
|
||||
if type_ not in ("toolchain", "uploader", "debugger"):
|
||||
continue
|
||||
path = p.get_package_dir(name)
|
||||
if isdir(join(path, "bin")):
|
||||
path = join(path, "bin")
|
||||
env.PrependENVPath("PATH", path)
|
||||
pkg_dir = p.get_package_dir(name)
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir)
|
||||
if ("windows" not in systype and isdir(join(pkg_dir, "lib"))
|
||||
and type_ != "toolchain"):
|
||||
env.PrependENVPath(
|
||||
"DYLD_LIBRARY_PATH"
|
||||
if "darwin" in systype else "LD_LIBRARY_PATH",
|
||||
join(pkg_dir, "lib"))
|
||||
|
||||
# Platform specific LD Scripts
|
||||
if isdir(join(p.get_dir(), "ldscripts")):
|
||||
env.Prepend(LIBPATH=[join(p.get_dir(), "ldscripts")])
|
||||
|
||||
if "BOARD" not in env:
|
||||
# handle _MCU and _F_CPU variables for AVR native
|
||||
for key, value in variables.UnknownVariables().items():
|
||||
if not key.startswith("BOARD_"):
|
||||
continue
|
||||
env.Replace(
|
||||
**{key.upper().replace("BUILD.", ""): base64.b64decode(value)})
|
||||
return
|
||||
|
||||
# update board manifest with a custom data
|
||||
board_config = env.BoardConfig()
|
||||
for k in variables.keys():
|
||||
if (k in env or
|
||||
not any([k.startswith("BOARD_"), k.startswith("UPLOAD_")])):
|
||||
for key, value in variables.UnknownVariables().items():
|
||||
if not key.startswith("BOARD_"):
|
||||
continue
|
||||
_opt, _val = k.lower().split("_", 1)
|
||||
board_config.update(key.lower()[6:], base64.b64decode(value))
|
||||
|
||||
# update default environment variables
|
||||
for key in variables.keys():
|
||||
if key in env or \
|
||||
not any([key.startswith("BOARD_"), key.startswith("UPLOAD_")]):
|
||||
continue
|
||||
_opt, _val = key.lower().split("_", 1)
|
||||
if _opt == "board":
|
||||
_opt = "build"
|
||||
if _val in board_config.get(_opt):
|
||||
env.Replace(**{k: board_config.get("%s.%s" % (_opt, _val))})
|
||||
env.Replace(**{key: board_config.get("%s.%s" % (_opt, _val))})
|
||||
|
||||
if "build.ldscript" in board_config:
|
||||
env.Replace(LDSCRIPT_PATH=board_config.get("build.ldscript"))
|
||||
|
||||
|
||||
def PrintConfiguration(env):
|
||||
platform_data = ["PLATFORM: %s >" % env.PioPlatform().title]
|
||||
system_data = ["SYSTEM:"]
|
||||
mcu = env.subst("$BOARD_MCU")
|
||||
f_cpu = env.subst("$BOARD_F_CPU")
|
||||
if mcu:
|
||||
system_data.append(mcu.upper())
|
||||
if f_cpu:
|
||||
f_cpu = int("".join([c for c in str(f_cpu) if c.isdigit()]))
|
||||
system_data.append("%dMHz" % (f_cpu / 1000000))
|
||||
|
||||
debug_tools = None
|
||||
if "BOARD" in env:
|
||||
board_config = env.BoardConfig()
|
||||
platform_data.append(board_config.get("name"))
|
||||
|
||||
debug_tools = board_config.get("debug", {}).get("tools")
|
||||
ram = board_config.get("upload", {}).get("maximum_ram_size")
|
||||
flash = board_config.get("upload", {}).get("maximum_size")
|
||||
system_data.append("%s RAM (%s Flash)" % (util.format_filesize(ram),
|
||||
util.format_filesize(flash)))
|
||||
|
||||
if platform_data:
|
||||
print " ".join(platform_data)
|
||||
if system_data:
|
||||
print " ".join(system_data)
|
||||
|
||||
# Debugging
|
||||
if not debug_tools:
|
||||
return
|
||||
|
||||
data = [
|
||||
"CURRENT(%s)" % board_config.get_debug_tool_name(
|
||||
env.subst("$DEBUG_TOOL"))
|
||||
]
|
||||
onboard = []
|
||||
external = []
|
||||
for key, value in debug_tools.items():
|
||||
if value.get("onboard"):
|
||||
onboard.append(key)
|
||||
else:
|
||||
external.append(key)
|
||||
if onboard:
|
||||
data.append("ON-BOARD(%s)" % ", ".join(sorted(onboard)))
|
||||
if external:
|
||||
data.append("EXTERNAL(%s)" % ", ".join(sorted(external)))
|
||||
|
||||
print "DEBUG: %s" % " ".join(data)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
@@ -102,4 +180,5 @@ def generate(env):
|
||||
env.AddMethod(BoardConfig)
|
||||
env.AddMethod(GetFrameworkScript)
|
||||
env.AddMethod(LoadPioPlatform)
|
||||
env.AddMethod(PrintConfiguration)
|
||||
return env
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from os.path import join, sep
|
||||
|
||||
|
||||
def ProcessTest(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
|
||||
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")])
|
||||
unitylib = env.BuildLibrary(
|
||||
join("$BUILD_DIR", "UnityTestLib"),
|
||||
env.PioPlatform().get_package_dir("tool-unity"))
|
||||
env.Prepend(LIBS=[unitylib])
|
||||
|
||||
src_filter = None
|
||||
if "PIOTEST" in env:
|
||||
src_filter = "+<output_export.cpp>"
|
||||
src_filter += " +<%s%s>" % (env['PIOTEST'], sep)
|
||||
|
||||
return env.CollectBuildFiles(
|
||||
"$BUILDTEST_DIR",
|
||||
"$PROJECTTEST_DIR",
|
||||
src_filter=src_filter,
|
||||
duplicate=False)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(ProcessTest)
|
||||
return env
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -14,19 +14,21 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from os import environ
|
||||
from os.path import isfile, join
|
||||
from platform import system
|
||||
from shutil import copyfile
|
||||
from time import sleep
|
||||
|
||||
from SCons.Node.Alias import Alias
|
||||
from serial import Serial
|
||||
from SCons.Script import ARGUMENTS
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import util
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
|
||||
def FlushSerialBuffer(env, port):
|
||||
s = Serial(env.subst(port))
|
||||
@@ -46,9 +48,9 @@ def TouchSerialPort(env, port, baudrate):
|
||||
s = Serial(port=port, baudrate=baudrate)
|
||||
s.setDTR(False)
|
||||
s.close()
|
||||
except: # pylint: disable=W0702
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
sleep(0.4)
|
||||
sleep(0.4) # DO NOT REMOVE THAT (required by SAM-BA based boards)
|
||||
|
||||
|
||||
def WaitForNewSerialPort(env, before):
|
||||
@@ -56,12 +58,12 @@ def WaitForNewSerialPort(env, before):
|
||||
prev_port = env.subst("$UPLOAD_PORT")
|
||||
new_port = None
|
||||
elapsed = 0
|
||||
sleep(1)
|
||||
before = [p['port'] for p in before]
|
||||
while elapsed < 5 and new_port is None:
|
||||
now = util.get_serialports()
|
||||
now = [p['port'] for p in util.get_serial_ports()]
|
||||
for p in now:
|
||||
if p not in before:
|
||||
new_port = p['port']
|
||||
new_port = p
|
||||
break
|
||||
before = now
|
||||
sleep(0.25)
|
||||
@@ -69,10 +71,16 @@ def WaitForNewSerialPort(env, before):
|
||||
|
||||
if not new_port:
|
||||
for p in now:
|
||||
if prev_port == p['port']:
|
||||
new_port = p['port']
|
||||
if prev_port == p:
|
||||
new_port = p
|
||||
break
|
||||
|
||||
try:
|
||||
s = Serial(new_port)
|
||||
s.close()
|
||||
except SerialException:
|
||||
sleep(1)
|
||||
|
||||
if not new_port:
|
||||
sys.stderr.write("Error: Couldn't find a board on the selected port. "
|
||||
"Check that you have the correct port selected. "
|
||||
@@ -83,7 +91,7 @@ def WaitForNewSerialPort(env, before):
|
||||
return new_port
|
||||
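WaitForNewSerialPort above snapshots the list of serial ports, waits, and returns the first port that was not present before, which is how boards that re-enumerate after a reset (or a 1200-bps touch) are picked up. A rough standalone equivalent using pyserial 3's list_ports, with an arbitrary timeout and polling interval:

import time
from serial.tools import list_ports


def wait_for_new_port(timeout=5.0, poll=0.25):
    # Snapshot the ports visible now, then look for one that appears later.
    before = {p.device for p in list_ports.comports()}
    elapsed = 0.0
    while elapsed < timeout:
        now = {p.device for p in list_ports.comports()}
        fresh = now - before
        if fresh:
            return sorted(fresh)[0]
        before = now
        time.sleep(poll)
        elapsed += poll
    return None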
|
||||
|
||||
def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
def AutodetectUploadPort(*args, **kwargs):
|
||||
env = args[0]
|
||||
|
||||
def _get_pattern():
|
||||
@@ -101,25 +109,36 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
|
||||
def _look_for_mbed_disk():
|
||||
msdlabels = ("mbed", "nucleo", "frdm", "microbit")
|
||||
for item in util.get_logicaldisks():
|
||||
if not _is_match_pattern(item['disk']):
|
||||
for item in util.get_logical_devices():
|
||||
if item['path'].startswith("/net") or not _is_match_pattern(
|
||||
item['path']):
|
||||
continue
|
||||
if (item['name'] and
|
||||
any([l in item['name'].lower() for l in msdlabels])):
|
||||
return item['disk']
|
||||
if isfile(join(item['disk'], "mbed.html")):
|
||||
return item['disk']
|
||||
mbed_pages = [
|
||||
join(item['path'], n) for n in ("mbed.htm", "mbed.html")
|
||||
]
|
||||
if any(isfile(p) for p in mbed_pages):
|
||||
return item['path']
|
||||
if item['name'] \
|
||||
and any(l in item['name'].lower() for l in msdlabels):
|
||||
return item['path']
|
||||
return None
|
||||
|
||||
def _look_for_serial_port():
|
||||
port = None
|
||||
board_hwids = []
|
||||
upload_protocol = env.subst("$UPLOAD_PROTOCOL")
|
||||
if "BOARD" in env and "build.hwids" in env.BoardConfig():
|
||||
board_hwids = env.BoardConfig().get("build.hwids")
|
||||
for item in util.get_serialports(filter_hwid=True):
|
||||
for item in util.get_serial_ports(filter_hwid=True):
|
||||
if not _is_match_pattern(item['port']):
|
||||
continue
|
||||
port = item['port']
|
||||
if upload_protocol.startswith("blackmagic"):
|
||||
if "windows" in util.get_systype() and \
|
||||
port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item['description']:
|
||||
return port
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item['hwid']:
|
||||
@@ -130,18 +149,20 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
print env.subst("Use manually specified: $UPLOAD_PORT")
|
||||
return
|
||||
|
||||
if "mbed" in env.subst("$PIOFRAMEWORK"):
|
||||
if (env.subst("$UPLOAD_PROTOCOL") == "mbed"
|
||||
or ("mbed" in env.subst("$PIOFRAMEWORK")
|
||||
and not env.subst("$UPLOAD_PROTOCOL"))):
|
||||
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
|
||||
else:
|
||||
if (system() == "Linux" and not any([
|
||||
if ("linux" in util.get_systype() and not any([
|
||||
isfile("/etc/udev/rules.d/99-platformio-udev.rules"),
|
||||
isfile("/lib/udev/rules.d/99-platformio-udev.rules")
|
||||
])):
|
||||
sys.stderr.write(
|
||||
"\nWarning! Please install `99-platformio-udev.rules` and "
|
||||
"check that your board's PID and VID are listed in the rules."
|
||||
"\n https://raw.githubusercontent.com/platformio/platformio"
|
||||
"/develop/scripts/99-platformio-udev.rules\n")
|
||||
"\n http://docs.platformio.org/en/latest/faq.html"
|
||||
"#platformio-udev-rules\n")
|
||||
env.Replace(UPLOAD_PORT=_look_for_serial_port())
|
||||
|
||||
if env.subst("$UPLOAD_PORT"):
|
||||
@@ -155,7 +176,7 @@ def AutodetectUploadPort(*args, **kwargs): # pylint: disable=unused-argument
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
def UploadToDisk(_, target, source, env):
|
||||
assert "UPLOAD_PORT" in env
|
||||
progname = env.subst("$PROGNAME")
|
||||
for ext in ("bin", "hex"):
|
||||
@@ -168,35 +189,102 @@ def UploadToDisk(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
"(Some boards may require manual hard reset)"
|
||||
|
||||
|
||||
def CheckUploadSize(_, target, source, env): # pylint: disable=W0613,W0621
|
||||
if "BOARD" not in env:
|
||||
return
|
||||
max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
|
||||
if max_size == 0 or "SIZETOOL" not in env:
|
||||
return
|
||||
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
cmd = [
|
||||
env.subst("$SIZETOOL"), "-B",
|
||||
str(source[0] if isinstance(target[0], Alias) else target[0])
|
||||
def CheckUploadSize(_, target, source, env):
|
||||
check_conditions = [
|
||||
env.get("BOARD"),
|
||||
env.get("SIZETOOL") or env.get("SIZECHECKCMD")
|
||||
]
|
||||
result = util.exec_command(cmd, env=sysenv)
|
||||
if result['returncode'] != 0:
|
||||
if not all(check_conditions):
|
||||
return
|
||||
program_max_size = int(env.BoardConfig().get("upload.maximum_size", 0))
|
||||
data_max_size = int(env.BoardConfig().get("upload.maximum_ram_size", 0))
|
||||
if program_max_size == 0:
|
||||
return
|
||||
print result['out'].strip()
|
||||
|
||||
line = result['out'].strip().splitlines()[1]
|
||||
values = [v.strip() for v in line.split("\t")]
|
||||
used_size = int(values[0]) + int(values[1])
|
||||
def _configure_defaults():
|
||||
env.Replace(
|
||||
SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
|
||||
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
|
||||
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+")
|
||||
|
||||
if used_size > max_size:
|
||||
def _get_size_output():
|
||||
cmd = env.get("SIZECHECKCMD")
|
||||
if not cmd:
|
||||
return None
|
||||
if not isinstance(cmd, list):
|
||||
cmd = cmd.split()
|
||||
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command(env.subst(cmd), env=sysenv)
|
||||
if result['returncode'] != 0:
|
||||
return None
|
||||
return result['out'].strip()
|
||||
|
||||
def _calculate_size(output, pattern):
|
||||
if not output or not pattern:
|
||||
return -1
|
||||
size = 0
|
||||
regexp = re.compile(pattern)
|
||||
for line in output.split("\n"):
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
match = regexp.search(line)
|
||||
if not match:
|
||||
continue
|
||||
size += sum(int(value) for value in match.groups())
|
||||
return size
|
||||
|
||||
def _format_availale_bytes(value, total):
|
||||
percent_raw = float(value) / float(total)
|
||||
blocks_per_progress = 10
|
||||
used_blocks = int(round(blocks_per_progress * percent_raw))
|
||||
if used_blocks > blocks_per_progress:
|
||||
used_blocks = blocks_per_progress
|
||||
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
|
||||
"=" * used_blocks, blocks_per_progress, percent_raw, value, total)
|
||||
|
||||
if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
|
||||
_configure_defaults()
|
||||
output = _get_size_output()
|
||||
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
|
||||
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))
|
||||
|
||||
print "Memory Usage -> http://bit.ly/pio-memory-usage"
|
||||
if data_max_size and data_size > -1:
|
||||
print "DATA: %s" % _format_availale_bytes(data_size, data_max_size)
|
||||
if program_size > -1:
|
||||
print "PROGRAM: %s" % _format_availale_bytes(program_size,
|
||||
program_max_size)
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print output
|
||||
|
||||
# raise error
|
||||
# if data_max_size and data_size > data_max_size:
|
||||
# sys.stderr.write(
|
||||
# "Error: The data size (%d bytes) is greater "
|
||||
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
|
||||
# env.Exit(1)
|
||||
if program_size > program_max_size:
|
||||
sys.stderr.write("Error: The program size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" %
|
||||
(used_size, max_size))
|
||||
(program_size, program_max_size))
|
||||
env.Exit(1)
|
||||
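The reworked CheckUploadSize above no longer parses a fixed `size -B` layout; it runs whatever SIZECHECKCMD is configured (defaulting to `$SIZETOOL -B -d $SOURCES`) and sums the regex captures line by line, so platforms can override the patterns. The arithmetic can be checked against a made-up size output:

import re

# Example output of `arm-none-eabi-size -B -d firmware.elf` (values invented)
SIZE_OUTPUT = """   text\t   data\t    bss\t    dec\t    hex\tfilename
  26916\t    344\t   1668\t  28928\t   7100\tfirmware.elf"""

SIZEPROGREGEXP = r"^(\d+)\s+(\d+)\s+\d+\s"       # text + data -> flash usage
SIZEDATAREGEXP = r"^\d+\s+(\d+)\s+(\d+)\s+\d+"   # data + bss  -> RAM usage

def calculate_size(output, pattern):
    regexp = re.compile(pattern)
    size = 0
    for line in output.split("\n"):
        match = regexp.search(line.strip())
        if match:
            size += sum(int(value) for value in match.groups())
    return size

print(calculate_size(SIZE_OUTPUT, SIZEPROGREGEXP))  # 27260 bytes of flash
print(calculate_size(SIZE_OUTPUT, SIZEDATAREGEXP))  # 2012 bytes of RAM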
|
||||
|
||||
def PrintUploadInfo(env):
|
||||
configured = env.subst("$UPLOAD_PROTOCOL")
|
||||
available = [configured] if configured else []
|
||||
if "BOARD" in env:
|
||||
available.extend(env.BoardConfig().get("upload", {}).get(
|
||||
"protocols", []))
|
||||
if available:
|
||||
print "AVAILABLE: %s" % ", ".join(sorted(set(available)))
|
||||
if configured:
|
||||
print "CURRENT: upload_protocol = %s" % configured
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
@@ -208,4 +296,5 @@ def generate(env):
|
||||
env.AddMethod(AutodetectUploadPort)
|
||||
env.AddMethod(UploadToDisk)
|
||||
env.AddMethod(CheckUploadSize)
|
||||
env.AddMethod(PrintUploadInfo)
|
||||
return env
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -72,7 +72,7 @@ def exists(_):
|
||||
|
||||
def generate(env):
|
||||
if system() != "Windows":
|
||||
return
|
||||
return None
|
||||
|
||||
env.Replace(_long_sources_hook=long_sources_hook)
|
||||
env.Replace(_long_incflags_hook=long_incflags_hook)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -20,16 +20,62 @@ from glob import glob
|
||||
from os import sep, walk
|
||||
from os.path import basename, dirname, isdir, join, realpath
|
||||
|
||||
from SCons.Action import Action
|
||||
from SCons import Builder, Util
|
||||
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild,
|
||||
DefaultEnvironment, SConscript)
|
||||
from SCons.Util import case_sensitive_suffixes, is_Sequence
|
||||
DefaultEnvironment, Export, SConscript)
|
||||
|
||||
from platformio.util import pioversion_to_intstr
|
||||
from platformio.util import glob_escape, pioversion_to_intstr
|
||||
|
||||
SRC_BUILD_EXT = ["c", "cpp", "S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_C_EXT = ["c", "cc", "cpp"]
|
||||
SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-<svn%s>" % sep]
|
||||
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
|
||||
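SRC_FILTER_PATTERNS_RE above tokenizes the src_filter syntax, where each +<glob> adds matches and each -<glob> removes them relative to the source directory. A quick look at how the default filter (with a POSIX path separator) is tokenized:

import re

SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")

# The default filter: take everything, then drop VCS folders
src_filter = "+<*> -<.git/> -<svn/>"

for action, pattern in SRC_FILTER_PATTERNS_RE.findall(src_filter):
    print("%s %s" % (action, pattern))
# + *
# - .git/
# - svn/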
|
||||
|
||||
def scons_patched_match_splitext(path, suffixes=None):
|
||||
"""Patch SCons Builder, append $OBJSUFFIX to the end of each target"""
|
||||
tokens = Util.splitext(path)
|
||||
if suffixes and tokens[1] and tokens[1] in suffixes:
|
||||
return (path, tokens[1])
|
||||
return tokens
|
||||
|
||||
|
||||
def _build_project_deps(env):
|
||||
project_lib_builder = env.ConfigureProjectLibBuilder()
|
||||
|
||||
# append project libs to the beginning of list
|
||||
env.Prepend(LIBS=project_lib_builder.build())
|
||||
# append extra linker related options from libs
|
||||
env.AppendUnique(
|
||||
**{
|
||||
key: project_lib_builder.env.get(key)
|
||||
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
|
||||
if project_lib_builder.env.get(key)
|
||||
})
|
||||
|
||||
projenv = env.Clone()
|
||||
|
||||
# CPPPATH from dependencies
|
||||
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
|
||||
# extra build flags from `platformio.ini`
|
||||
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
|
||||
is_test = "__test" in COMMAND_LINE_TARGETS
|
||||
if is_test:
|
||||
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
|
||||
"$PIOTEST_SRC_FILTER")
|
||||
if not is_test or env.get("TEST_BUILD_PROJECT_SRC") == "true":
|
||||
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
|
||||
env.get("SRC_FILTER"))
|
||||
|
||||
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
|
||||
env.Exit(1)
|
||||
|
||||
Export("projenv")
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
@@ -41,101 +87,101 @@ def BuildProgram(env):
|
||||
|
||||
_append_pio_macros()
|
||||
|
||||
# fix ASM handling under non-casitive OS
|
||||
if not case_sensitive_suffixes(".s", ".S"):
|
||||
env.PrintConfiguration()
|
||||
|
||||
# fix ASM handling under non case-sensitive OS
|
||||
if not Util.case_sensitive_suffixes(".s", ".S"):
|
||||
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
|
||||
|
||||
if "__debug" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessDebug()
|
||||
|
||||
# process extra flags from board
|
||||
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
|
||||
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
|
||||
# remove base flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
# apply user flags
|
||||
env.ProcessFlags(env.get("BUILD_FLAGS"))
|
||||
|
||||
# process framework scripts
|
||||
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
|
||||
|
||||
# restore PIO macros if it was deleted by framework
|
||||
_append_pio_macros()
|
||||
|
||||
# build dependent libs
|
||||
deplibs = env.BuildDependentLibraries("$PROJECTSRC_DIR")
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
# append specified LD_SCRIPT
|
||||
if ("LDSCRIPT_PATH" in env and
|
||||
not any(["-Wl,-T" in f for f in env['LINKFLAGS']])):
|
||||
env.Append(LINKFLAGS=['-Wl,-T"$LDSCRIPT_PATH"'])
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessTest()
|
||||
|
||||
# build project with dependencies
|
||||
_build_project_deps(env)
|
||||
|
||||
# append into the beginning a main LD script
|
||||
if (env.get("LDSCRIPT_PATH")
|
||||
and not any("-Wl,-T" in f for f in env['LINKFLAGS'])):
|
||||
env.Prepend(LINKFLAGS=["-T", "$LDSCRIPT_PATH"])
|
||||
|
||||
# enable "cyclic reference" for linker
|
||||
if env.get("LIBS", deplibs) and env.GetCompilerType() == "gcc":
|
||||
if env.get("LIBS") and env.GetCompilerType() == "gcc":
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
# Handle SRC_BUILD_FLAGS
|
||||
env.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
|
||||
env.Append(
|
||||
CPPPATH=["$PROJECTSRC_DIR"],
|
||||
LIBS=deplibs,
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
PIOBUILDFILES=env.CollectBuildFiles(
|
||||
"$BUILDSRC_DIR",
|
||||
"$PROJECTSRC_DIR",
|
||||
src_filter=env.get("SRC_FILTER"),
|
||||
duplicate=False))
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.Append(PIOBUILDFILES=env.ProcessTest())
|
||||
|
||||
if not env['PIOBUILDFILES'] and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
|
||||
env.Exit(1)
|
||||
|
||||
program = env.Program(
|
||||
join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES'])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
checksize_action = Action(env.CheckUploadSize, "Checking program size")
|
||||
AlwaysBuild(env.Alias("checkprogsize", program, checksize_action))
|
||||
if set(["upload", "program"]) & set(COMMAND_LINE_TARGETS):
|
||||
env.AddPostAction(program, checksize_action)
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"checkprogsize", program,
|
||||
env.VerboseAction(env.CheckUploadSize,
|
||||
"Checking size $PIOMAINPROG")))
|
||||
|
||||
return program
|
||||
|
||||
|
||||
def ParseFlagsExtended(env, flags):
|
||||
if isinstance(flags, list):
|
||||
flags = " ".join(flags)
|
||||
result = env.ParseFlags(str(flags))
|
||||
|
||||
cppdefines = []
|
||||
for item in result['CPPDEFINES']:
|
||||
if not Util.is_Sequence(item):
|
||||
cppdefines.append(item)
|
||||
continue
|
||||
name, value = item[:2]
|
||||
if '\"' in value:
|
||||
value = value.replace('\"', '\\\"')
|
||||
elif value.isdigit():
|
||||
value = int(value)
|
||||
elif value.replace(".", "", 1).isdigit():
|
||||
value = float(value)
|
||||
cppdefines.append((name, value))
|
||||
result['CPPDEFINES'] = cppdefines
|
||||
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
if isdir(p):
|
||||
result[k][i] = realpath(p)
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
|
||||
|
||||
return result
|
||||
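ParseFlagsExtended above also coerces -D values so that numeric defines end up as real int or float objects while embedded quotes stay escaped for the command line. The coercion in isolation, with example values:

# Mirrors the value coercion applied to each (name, value) define above.
def coerce_define_value(value):
    if '"' in value:
        return value.replace('"', '\\"')
    if value.isdigit():
        return int(value)
    if value.replace(".", "", 1).isdigit():
        return float(value)
    return value

print(coerce_define_value("115200"))       # 115200 (int)
print(coerce_define_value("3.3"))          # 3.3 (float)
print(coerce_define_value('"my string"'))  # \"my string\" (escaped for the command line)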
|
||||
|
||||
def ProcessFlags(env, flags): # pylint: disable=too-many-branches
|
||||
if not flags:
|
||||
return
|
||||
if isinstance(flags, list):
|
||||
flags = " ".join(flags)
|
||||
parsed_flags = env.ParseFlags(str(flags))
|
||||
for flag in parsed_flags.pop("CPPDEFINES"):
|
||||
if not is_Sequence(flag):
|
||||
env.Append(CPPDEFINES=flag)
|
||||
continue
|
||||
_key, _value = flag[:2]
|
||||
if '\"' in _value:
|
||||
_value = _value.replace('\"', '\\\"')
|
||||
elif _value.isdigit():
|
||||
_value = int(_value)
|
||||
elif _value.replace(".", "", 1).isdigit():
|
||||
_value = float(_value)
|
||||
env.Append(CPPDEFINES=(_key, _value))
|
||||
env.Append(**parsed_flags)
|
||||
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(env.get(k, [])):
|
||||
if isdir(p):
|
||||
env[k][i] = realpath(p)
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(env.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
env['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
|
||||
env.Append(**env.ParseFlagsExtended(flags))
|
||||
|
||||
# Cancel any previous definition of name, either built in or
|
||||
# provided with a -D option // Issue #191
|
||||
# provided with a -U option // Issue #191
|
||||
undefines = [
|
||||
u for u in env.get("CCFLAGS", [])
|
||||
if isinstance(u, basestring) and u.startswith("-U")
|
||||
@@ -149,19 +195,27 @@ def ProcessFlags(env, flags): # pylint: disable=too-many-branches
|
||||
def ProcessUnFlags(env, flags):
|
||||
if not flags:
|
||||
return
|
||||
if isinstance(flags, list):
|
||||
flags = " ".join(flags)
|
||||
parsed_flags = env.ParseFlags(str(flags))
|
||||
all_flags = []
|
||||
for items in parsed_flags.values():
|
||||
all_flags.extend(items)
|
||||
all_flags = set(all_flags)
|
||||
parsed = env.ParseFlagsExtended(flags)
|
||||
|
||||
for key in parsed_flags:
|
||||
cur_flags = set(env.Flatten(env.get(key, [])))
|
||||
for item in cur_flags & all_flags:
|
||||
while item in env[key]:
|
||||
env[key].remove(item)
|
||||
# get all flags and copy them to each "*FLAGS" variable
|
||||
all_flags = []
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
all_flags.extend(unflags)
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
parsed[key].extend(all_flags)
|
||||
|
||||
for key, unflags in parsed.items():
|
||||
for unflag in unflags:
|
||||
for current in env.get(key, []):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
isinstance(current, (tuple, list))
|
||||
and unflag[0] == current[0]
|
||||
]
|
||||
if any(conditions):
|
||||
env[key].remove(current)
|
||||
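The rewritten ProcessUnFlags above spreads every *FLAGS unflag across all *FLAGS variables and then removes an entry either on an exact match or, for tuple entries such as ("-include", "file.h"), on a match of the first element. A reduced sketch of that removal rule applied to a plain list (flag values below are illustrative):

# Simplified removal rule from ProcessUnFlags above, applied to a plain list.
def remove_unflags(current_flags, unflags):
    kept = []
    for flag in current_flags:
        hit = any(
            unflag == flag
            or (isinstance(flag, (tuple, list)) and unflag[0] == flag[0])
            for unflag in unflags)
        if not hit:
            kept.append(flag)
    return kept

ccflags = ["-Os", "-Wall", ("-include", "config.h")]
print(remove_unflags(ccflags, ["-Os", ("-include", "other.h")]))
# ['-Wall']  -- "-Os" removed exactly, the -include pair removed by its first element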
|
||||
|
||||
def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
|
||||
@@ -175,15 +229,14 @@ def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None):
|
||||
|
||||
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
|
||||
|
||||
def _append_build_item(items, item, src_dir):
|
||||
if env.IsFileWithExt(item, SRC_BUILD_EXT + SRC_HEADER_EXT):
|
||||
items.add(item.replace(src_dir + sep, ""))
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
src_filter = env.subst(src_filter) if src_filter else None
|
||||
src_filter = src_filter or SRC_FILTER_DEFAULT
|
||||
if isinstance(src_filter, list) or isinstance(src_filter, tuple):
|
||||
if isinstance(src_filter, (list, tuple)):
|
||||
src_filter = " ".join(src_filter)
|
||||
|
||||
matches = set()
|
||||
@@ -191,7 +244,7 @@ def MatchSourceFiles(env, src_dir, src_filter=None):
|
||||
src_filter = src_filter.replace("/", sep).replace("\\", sep)
|
||||
for (action, pattern) in SRC_FILTER_PATTERNS_RE.findall(src_filter):
|
||||
items = set()
|
||||
for item in glob(join(src_dir, pattern)):
|
||||
for item in glob(join(glob_escape(src_dir), pattern)):
|
||||
if isdir(item):
|
||||
for root, _, files in walk(item, followlinks=True):
|
||||
for f in files:
|
||||
@@ -252,10 +305,13 @@ def BuildFrameworks(env, frameworks):
|
||||
|
||||
for f in frameworks:
|
||||
if f in ("arduino", "energia"):
|
||||
env.ConvertInoToCpp()
|
||||
# Arduino IDE appends .o the end of filename
|
||||
Builder.match_splitext = scons_patched_match_splitext
|
||||
if "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.ConvertInoToCpp()
|
||||
|
||||
if f in board_frameworks:
|
||||
SConscript(env.GetFrameworkScript(f))
|
||||
SConscript(env.GetFrameworkScript(f), exports="env")
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: This board doesn't support %s framework!\n" % f)
|
||||
@@ -263,16 +319,15 @@ def BuildFrameworks(env, frameworks):
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
|
||||
lib = env.Clone()
|
||||
return lib.StaticLibrary(
|
||||
lib.subst(variant_dir),
|
||||
lib.CollectBuildFiles(
|
||||
variant_dir, src_dir, src_filter=src_filter))
|
||||
return env.StaticLibrary(
|
||||
env.subst(variant_dir),
|
||||
env.CollectBuildFiles(variant_dir, src_dir, src_filter))
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
DefaultEnvironment().Append(PIOBUILDFILES=env.Clone().CollectBuildFiles(
|
||||
variant_dir, src_dir, src_filter=src_filter))
|
||||
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
DefaultEnvironment().Append(
|
||||
PIOBUILDFILES=[env.Object(node) for node in nodes])
|
||||
|
||||
|
||||
def exists(_):
|
||||
@@ -281,6 +336,7 @@ def exists(_):
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
env.AddMethod(ProcessUnFlags)
|
||||
env.AddMethod(IsFileWithExt)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -18,7 +18,7 @@ import sys
|
||||
|
||||
import click
|
||||
|
||||
from platformio.pioplus import pioplus_call
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.group("account", short_help="Manage PIO Account")
|
||||
@@ -45,12 +45,16 @@ def account_logout():
|
||||
|
||||
|
||||
@cli.command("password", short_help="Change password")
|
||||
def account_password():
|
||||
@click.option("--old-password")
|
||||
@click.option("--new-password")
|
||||
def account_password(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("token", short_help="Get or regenerate Authentication Token")
|
||||
@click.option("-p", "--password")
|
||||
@click.option("--regenerate", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_token(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
@@ -61,7 +65,8 @@ def account_forgot(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("show", short_help="PIO Account information: groups, permissions")
|
||||
@cli.command("show", short_help="PIO Account information")
|
||||
@click.option("--offline", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_show(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -16,100 +16,69 @@ import json
|
||||
|
||||
import click
|
||||
|
||||
from platformio.exception import APIRequestError, InternetIsOffline
|
||||
from platformio import util
|
||||
from platformio.managers.platform import PlatformManager
|
||||
|
||||
|
||||
@click.command("boards", short_help="Pre-configured Embedded Boards")
|
||||
@click.command("boards", short_help="Embedded Board Explorer")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--installed", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def cli(query, installed, json_output): # pylint: disable=R0912
|
||||
if json_output:
|
||||
return _ouput_boards_json(query, installed)
|
||||
|
||||
BOARDLIST_TPL = ("{type:<30} {mcu:<14} {frequency:<8} "
|
||||
" {flash:<7} {ram:<6} {name}")
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
return _print_boards_json(query, installed)
|
||||
|
||||
grpboards = {}
|
||||
for board in _get_boards(installed):
|
||||
if query and query.lower() not in json.dumps(board).lower():
|
||||
continue
|
||||
if board['platform'] not in grpboards:
|
||||
grpboards[board['platform']] = []
|
||||
grpboards[board['platform']].append(board)
|
||||
|
||||
for (platform, pboards) in sorted(grpboards.items()):
|
||||
if query:
|
||||
search_data = json.dumps(pboards).lower()
|
||||
if query.lower() not in search_data.lower():
|
||||
continue
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
for (platform, boards) in sorted(grpboards.items()):
|
||||
click.echo("")
|
||||
click.echo("Platform: ", nl=False)
|
||||
click.secho(platform, bold=True)
|
||||
click.echo("-" * terminal_width)
|
||||
print_boards(boards)
|
||||
return True
|
||||
|
||||
|
||||
def print_boards(boards):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
BOARDLIST_TPL = ("{type:<30} {mcu:<14} {frequency:<8} "
|
||||
" {flash:<7} {ram:<6} {name}")
|
||||
click.echo(
|
||||
BOARDLIST_TPL.format(
|
||||
type=click.style("ID", fg="cyan"),
|
||||
mcu="MCU",
|
||||
frequency="Frequency",
|
||||
flash="Flash",
|
||||
ram="RAM",
|
||||
name="Name"))
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
for board in boards:
|
||||
click.echo(
|
||||
BOARDLIST_TPL.format(
|
||||
type=click.style(
|
||||
"ID", fg="cyan"),
|
||||
mcu="MCU",
|
||||
frequency="Frequency",
|
||||
flash="Flash",
|
||||
ram="RAM",
|
||||
name="Name"))
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
for board in sorted(pboards, key=lambda b: b['id']):
|
||||
if query:
|
||||
search_data = "%s %s" % (board['id'],
|
||||
json.dumps(board).lower())
|
||||
if query.lower() not in search_data.lower():
|
||||
continue
|
||||
|
||||
flash_size = "%dkB" % (board['rom'] / 1024)
|
||||
|
||||
ram_size = board['ram']
|
||||
if ram_size >= 1024:
|
||||
if ram_size % 1024:
|
||||
ram_size = "%.1fkB" % (ram_size / 1024.0)
|
||||
else:
|
||||
ram_size = "%dkB" % (ram_size / 1024)
|
||||
else:
|
||||
ram_size = "%dB" % ram_size
|
||||
|
||||
click.echo(
|
||||
BOARDLIST_TPL.format(
|
||||
type=click.style(
|
||||
board['id'], fg="cyan"),
|
||||
mcu=board['mcu'],
|
||||
frequency="%dMhz" % (board['fcpu'] / 1000000),
|
||||
flash=flash_size,
|
||||
ram=ram_size,
|
||||
name=board['name']))
|
||||
type=click.style(board['id'], fg="cyan"),
|
||||
mcu=board['mcu'],
|
||||
frequency="%dMHz" % (board['fcpu'] / 1000000),
|
||||
flash=util.format_filesize(board['rom']),
|
||||
ram=util.format_filesize(board['ram']),
|
||||
name=board['name']))
|
||||
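print_boards above keeps the table aligned purely through str.format field widths in BOARDLIST_TPL; the layout is easy to preview with a header row and one example board row (values below are illustrative):

BOARDLIST_TPL = ("{type:<30} {mcu:<14} {frequency:<8} "
                 " {flash:<7} {ram:<6} {name}")

print(BOARDLIST_TPL.format(type="ID", mcu="MCU", frequency="Frequency",
                           flash="Flash", ram="RAM", name="Name"))
print(BOARDLIST_TPL.format(type="uno", mcu="ATMEGA328P", frequency="16MHz",
                           flash="31.50KB", ram="2KB", name="Arduino Uno"))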
|
||||
|
||||
def _get_boards(installed=False):
|
||||
boards = PlatformManager().get_installed_boards()
|
||||
if not installed:
|
||||
know_boards = ["%s:%s" % (b['platform'], b['id']) for b in boards]
|
||||
try:
|
||||
for board in PlatformManager().get_registered_boards():
|
||||
key = "%s:%s" % (board['platform'], board['id'])
|
||||
if key not in know_boards:
|
||||
boards.append(board)
|
||||
except InternetIsOffline:
|
||||
pass
|
||||
return boards
|
||||
pm = PlatformManager()
|
||||
return pm.get_installed_boards() if installed else pm.get_all_boards()
|
||||
|
||||
|
||||
def _ouput_boards_json(query, installed=False):
|
||||
def _print_boards_json(query, installed=False):
|
||||
result = []
|
||||
try:
|
||||
boards = _get_boards(installed)
|
||||
except APIRequestError:
|
||||
if not installed:
|
||||
boards = _get_boards(True)
|
||||
for board in boards:
|
||||
for board in _get_boards(installed):
|
||||
if query:
|
||||
search_data = "%s %s" % (board['id'], json.dumps(board).lower())
|
||||
if query.lower() not in search_data.lower():
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -152,7 +152,7 @@ def _copy_contents(dst_dir, contents):
|
||||
def _exclude_contents(dst_dir, patterns):
|
||||
contents = []
|
||||
for p in patterns:
|
||||
contents += glob(join(dst_dir, p))
|
||||
contents += glob(join(util.glob_escape(dst_dir), p))
|
||||
for path in contents:
|
||||
path = abspath(path)
|
||||
if isdir(path):
|
||||
|
||||
platformio/commands/debug.py (new file, 42 lines added)
@@ -0,0 +1,42 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
from os import getcwd
|
||||
|
||||
import click
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.command(
|
||||
"debug",
|
||||
context_settings=dict(ignore_unknown_options=True),
|
||||
short_help="PIO Unified Debugger")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -14,12 +14,12 @@
|
||||
|
||||
import json
|
||||
import sys
|
||||
from os import getcwd
|
||||
|
||||
import click
|
||||
from serial.tools import miniterm
|
||||
|
||||
from platformio.exception import MinitermException
|
||||
from platformio.util import get_serialports
|
||||
from platformio import exception, util
|
||||
|
||||
|
||||
@click.group(short_help="Monitor device or list existing")
|
||||
@@ -28,25 +28,76 @@ def cli():
|
||||
|
||||
|
||||
@cli.command("list", short_help="List devices")
|
||||
@click.option("--serial", is_flag=True, help="List serial ports, default")
|
||||
@click.option("--logical", is_flag=True, help="List logical devices")
|
||||
@click.option("--mdns", is_flag=True, help="List multicast DNS services")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def device_list(json_output):
|
||||
def device_list( # pylint: disable=too-many-branches
|
||||
serial, logical, mdns, json_output):
|
||||
if not logical and not mdns:
|
||||
serial = True
|
||||
data = {}
|
||||
if serial:
|
||||
data['serial'] = util.get_serial_ports()
|
||||
if logical:
|
||||
data['logical'] = util.get_logical_devices()
|
||||
if mdns:
|
||||
data['mdns'] = util.get_mdns_services()
|
||||
|
||||
single_key = data.keys()[0] if len(data.keys()) == 1 else None
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(get_serialports()))
|
||||
return
|
||||
return click.echo(json.dumps(data[single_key] if single_key else data))
|
||||
|
||||
for item in get_serialports():
|
||||
click.secho(item['port'], fg="cyan")
|
||||
click.echo("-" * len(item['port']))
|
||||
click.echo("Hardware ID: %s" % item['hwid'])
|
||||
click.echo("Description: %s" % item['description'])
|
||||
click.echo("")
|
||||
titles = {
|
||||
"serial": "Serial Ports",
|
||||
"logical": "Logical Devices",
|
||||
"mdns": "Multicast DNS Services"
|
||||
}
|
||||
|
||||
for key, value in data.iteritems():
|
||||
if not single_key:
|
||||
click.secho(titles[key], bold=True)
|
||||
click.echo("=" * len(titles[key]))
|
||||
|
||||
if key == "serial":
|
||||
for item in value:
|
||||
click.secho(item['port'], fg="cyan")
|
||||
click.echo("-" * len(item['port']))
|
||||
click.echo("Hardware ID: %s" % item['hwid'])
|
||||
click.echo("Description: %s" % item['description'])
|
||||
click.echo("")
|
||||
|
||||
if key == "logical":
|
||||
for item in value:
|
||||
click.secho(item['path'], fg="cyan")
|
||||
click.echo("-" * len(item['path']))
|
||||
click.echo("Name: %s" % item['name'])
|
||||
click.echo("")
|
||||
|
||||
if key == "mdns":
|
||||
for item in value:
|
||||
click.secho(item['name'], fg="cyan")
|
||||
click.echo("-" * len(item['name']))
|
||||
click.echo("Type: %s" % item['type'])
|
||||
click.echo("IP: %s" % item['ip'])
|
||||
click.echo("Port: %s" % item['port'])
|
||||
if item['properties']:
|
||||
click.echo("Properties: %s" % ("; ".join([
|
||||
"%s=%s" % (k, v)
|
||||
for k, v in item['properties'].iteritems()
|
||||
])))
|
||||
click.echo("")
|
||||
|
||||
if single_key:
|
||||
click.echo("")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("monitor", short_help="Monitor device (Serial)")
|
||||
@click.option("--port", "-p", help="Port, a number or a device name")
|
||||
@click.option(
|
||||
"--baud", "-b", type=int, default=9600, help="Set baud rate, default=9600")
|
||||
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
|
||||
@click.option(
|
||||
"--parity",
|
||||
default="N",
|
||||
@@ -61,12 +112,12 @@ def device_list(json_output):
|
||||
@click.option(
|
||||
"--rts",
|
||||
default=None,
|
||||
type=click.Choice(["0", "1"]),
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state")
|
||||
@click.option(
|
||||
"--dtr",
|
||||
default=None,
|
||||
type=click.Choice(["0", "1"]),
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state")
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
|
||||
@click.option(
|
||||
@@ -98,15 +149,41 @@ def device_list(json_output):
|
||||
"--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off")
|
||||
def device_monitor(**kwargs):
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, resolve_path=True))
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
help="Load configuration from `platformio.ini` and specified environment")
|
||||
def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
try:
|
||||
project_options = get_project_options(kwargs['project_dir'],
|
||||
kwargs['environment'])
|
||||
monitor_options = {k: v for k, v in project_options or []}
|
||||
if monitor_options:
|
||||
for k in ("port", "baud", "speed", "rts", "dtr"):
|
||||
k2 = "monitor_%s" % k
|
||||
if k == "speed":
|
||||
k = "baud"
|
||||
if kwargs[k] is None and k2 in monitor_options:
|
||||
kwargs[k] = monitor_options[k2]
|
||||
if k != "port":
|
||||
kwargs[k] = int(kwargs[k])
|
||||
except exception.NotPlatformIOProject:
|
||||
pass
|
||||
|
||||
if not kwargs['port']:
|
||||
ports = get_serialports(filter_hwid=True)
|
||||
ports = util.get_serial_ports(filter_hwid=True)
|
||||
if len(ports) == 1:
|
||||
kwargs['port'] = ports[0]['port']
|
||||
|
||||
sys.argv = ["monitor"]
|
||||
for k, v in kwargs.iteritems():
|
||||
if k in ("port", "baud", "rts", "dtr"):
|
||||
if k in ("port", "baud", "rts", "dtr", "environment", "project_dir"):
|
||||
continue
|
||||
k = "--" + k.replace("_", "-")
|
||||
if isinstance(v, bool):
|
||||
@@ -121,8 +198,31 @@ def device_monitor(**kwargs):
|
||||
try:
|
||||
miniterm.main(
|
||||
default_port=kwargs['port'],
|
||||
default_baudrate=kwargs['baud'],
|
||||
default_baudrate=kwargs['baud'] or 9600,
|
||||
default_rts=kwargs['rts'],
|
||||
default_dtr=kwargs['dtr'])
|
||||
except Exception as e:
|
||||
raise MinitermException(e)
|
||||
raise exception.MinitermException(e)
|
||||
|
||||
|
||||
def get_project_options(project_dir, environment):
    config = util.load_project_config(project_dir)
    if not config.sections():
        return None

    known_envs = [s[4:] for s in config.sections() if s.startswith("env:")]
    if environment:
        if environment in known_envs:
            return config.items("env:%s" % environment)
        raise exception.UnknownEnvNames(environment, ", ".join(known_envs))

    if not known_envs:
        return None

    if config.has_option("platformio", "env_default"):
        env_default = config.get("platformio",
                                 "env_default").split(", ")[0].strip()
        if env_default and env_default in known_envs:
            return config.items("env:%s" % env_default)

    return config.items("env:%s" % known_envs[0])
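The hunk above maps `monitor_*` keys from `platformio.ini` onto the device monitor's defaults, treating `monitor_speed` as an alias for `baud` and coercing everything except the port to an integer. A minimal standalone sketch of that mapping (names mirror the diff; the sample options list is hypothetical):

# Standalone sketch of the option mapping used by device_monitor above.
# `project_options` stands in for config.items("env:...") and its values are
# strings exactly as ConfigParser returns them; illustrative, not the real CLI.
def apply_monitor_options(kwargs, project_options):
    monitor_options = dict(project_options or [])
    for k in ("port", "baud", "speed", "rts", "dtr"):
        k2 = "monitor_%s" % k          # e.g. "monitor_speed"
        if k == "speed":
            k = "baud"                 # monitor_speed feeds the --baud default
        if kwargs.get(k) is None and k2 in monitor_options:
            kwargs[k] = monitor_options[k2]
            if k != "port":
                kwargs[k] = int(kwargs[k])
    return kwargs


# Hypothetical example: values as they would come from an [env:uno] section
print(apply_monitor_options(
    {"port": None, "baud": None, "rts": None, "dtr": None},
    [("monitor_speed", "115200"), ("monitor_rts", "0")]))
# -> {'port': None, 'baud': 115200, 'rts': 0, 'dtr': None}
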
42
platformio/commands/home.py
Normal file
@@ -0,0 +1,42 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

import click
import requests

from platformio.managers.core import pioplus_call


@click.command("home", short_help="PIO Home")
@click.option("--port", type=int, default=8008, help="HTTP port, default=8008")
@click.option(
    "--host",
    default="127.0.0.1",
    help="HTTP host, default=127.0.0.1. "
    "You can open PIO Home for inbound connections with --host=0.0.0.0")
@click.option("--no-open", is_flag=True)
def cli(*args, **kwargs):  # pylint: disable=unused-argument
    pioplus_call(sys.argv[1:])


def shutdown_servers():
    port = 8010
    while port < 9000:
        try:
            requests.get("http://127.0.0.1:%d?__shutdown__=1" % port)
            port += 1
        except:  # pylint: disable=bare-except
            return
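shutdown_servers() walks candidate PIO Home ports upwards from 8010 and stops at the first one that refuses the connection. A hedged sketch of the same probe pattern with an explicit timeout (the helper name and the timeout are additions for illustration, not part of the file above):

import requests

def probe_shutdown(start=8010, stop=9000, timeout=1.0):
    """Send the __shutdown__ query to consecutive local ports until one fails."""
    port = start
    while port < stop:
        try:
            # Same endpoint as shutdown_servers() above; the timeout keeps a
            # dead port from blocking the caller indefinitely.
            requests.get("http://127.0.0.1:%d?__shutdown__=1" % port,
                         timeout=timeout)
            port += 1
        except requests.exceptions.RequestException:
            return
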
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -57,6 +57,7 @@ def validate_boards(ctx, param, value): # pylint: disable=W0613
|
||||
"--ide", type=click.Choice(ProjectGenerator.get_supported_ides()))
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("--env-prefix", default="")
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
ctx, # pylint: disable=R0913
|
||||
@@ -64,28 +65,29 @@ def cli(
|
||||
board,
|
||||
ide,
|
||||
project_option,
|
||||
env_prefix):
|
||||
env_prefix,
|
||||
silent):
|
||||
|
||||
if project_dir == getcwd():
|
||||
click.secho("\nThe current working directory", fg="yellow", nl=False)
|
||||
click.secho(" %s " % project_dir, fg="cyan", nl=False)
|
||||
click.secho(
|
||||
"will be used for project.\n"
|
||||
"You can specify another project directory via\n"
|
||||
"`platformio init -d %PATH_TO_THE_PROJECT_DIR%` command.",
|
||||
fg="yellow")
|
||||
click.echo("")
|
||||
if not silent:
|
||||
if project_dir == getcwd():
|
||||
click.secho(
|
||||
"\nThe current working directory", fg="yellow", nl=False)
|
||||
click.secho(" %s " % project_dir, fg="cyan", nl=False)
|
||||
click.secho(
|
||||
"will be used for project.\n"
|
||||
"You can specify another project directory via\n"
|
||||
"`platformio init -d %PATH_TO_THE_PROJECT_DIR%` command.",
|
||||
fg="yellow")
|
||||
click.echo("")
|
||||
|
||||
click.echo("The next files/directories have been created in %s" %
|
||||
click.style(
|
||||
project_dir, fg="cyan"))
|
||||
click.echo("%s - Project Configuration File" % click.style(
|
||||
"platformio.ini", fg="cyan"))
|
||||
click.echo("%s - Put your source files here" % click.style(
|
||||
"src", fg="cyan"))
|
||||
click.echo("%s - Put here project specific (private) libraries" %
|
||||
click.style(
|
||||
"lib", fg="cyan"))
|
||||
click.echo("The next files/directories have been created in %s" %
|
||||
click.style(project_dir, fg="cyan"))
|
||||
click.echo("%s - Project Configuration File" % click.style(
|
||||
"platformio.ini", fg="cyan"))
|
||||
click.echo(
|
||||
"%s - Put your source files here" % click.style("src", fg="cyan"))
|
||||
click.echo("%s - Put here project specific (private) libraries" %
|
||||
click.style("lib", fg="cyan"))
|
||||
|
||||
init_base_project(project_dir)
|
||||
|
||||
@@ -94,43 +96,41 @@ def cli(
|
||||
ide is not None)
|
||||
|
||||
if ide:
|
||||
if not board:
|
||||
board = get_first_board(project_dir)
|
||||
if board:
|
||||
board = [board]
|
||||
if not board:
|
||||
env_name = get_best_envname(project_dir, board)
|
||||
if not env_name:
|
||||
raise exception.BoardNotDefined()
|
||||
if len(board) > 1:
|
||||
click.secho(
|
||||
"Warning! You have initialised project with more than 1 board"
|
||||
" for the specified IDE.\n"
|
||||
"However, the IDE features (code autocompletion, syntax "
|
||||
"linter) have been configured for the first board '%s' from "
|
||||
"your list '%s'." % (board[0], ", ".join(board)),
|
||||
fg="yellow")
|
||||
pg = ProjectGenerator(project_dir, ide, board[0])
|
||||
pg = ProjectGenerator(project_dir, ide, env_name)
|
||||
pg.generate()
|
||||
|
||||
click.secho(
|
||||
"\nProject has been successfully initialized!\nUseful commands:\n"
|
||||
"`platformio run` - process/build project from the current "
|
||||
"directory\n"
|
||||
"`platformio run --target upload` or `platformio run -t upload` "
|
||||
"- upload firmware to embedded board\n"
|
||||
"`platformio run --target clean` - clean project (remove compiled "
|
||||
"files)\n"
|
||||
"`platformio run --help` - additional information",
|
||||
fg="green")
|
||||
if not silent:
|
||||
click.secho(
|
||||
"\nProject has been successfully initialized!\nUseful commands:\n"
|
||||
"`platformio run` - process/build project from the current "
|
||||
"directory\n"
|
||||
"`platformio run --target upload` or `platformio run -t upload` "
|
||||
"- upload firmware to embedded board\n"
|
||||
"`platformio run --target clean` - clean project (remove compiled "
|
||||
"files)\n"
|
||||
"`platformio run --help` - additional information",
|
||||
fg="green")
|
||||
|
||||
|
||||
def get_first_board(project_dir):
def get_best_envname(project_dir, boards=None):
    config = util.load_project_config(project_dir)
    env_default = None
    if config.has_option("platformio", "env_default"):
        env_default = config.get("platformio",
                                 "env_default").split(", ")[0].strip()
    if env_default:
        return env_default
    section = None
    for section in config.sections():
        if not section.startswith("env:"):
            continue
        elif config.has_option(section, "board"):
            return config.get(section, "board")
    return None
        elif config.has_option(section, "board") and (not boards or config.get(
                section, "board") in boards):
            break
    return section[4:] if section else None
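For context, a small worked example of how get_best_envname() resolves an environment name. The platformio.ini contents are hypothetical, the config object is emulated with Python 3's configparser instead of util.load_project_config, and only the env_default branch is exercised:

import configparser

SAMPLE_INI = """
[platformio]
env_default = nodemcu, uno

[env:uno]
board = uno

[env:nodemcu]
board = nodemcuv2
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE_INI)

# Same precedence as get_best_envname(): env_default wins, otherwise the first
# [env:*] section whose board matches the requested boards (if any).
env_default = config.get("platformio", "env_default").split(", ")[0].strip()
print(env_default)  # -> "nodemcu", so the IDE project is generated for it
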
|
||||
|
||||
def init_base_project(project_dir):
|
||||
@@ -139,15 +139,12 @@ def init_base_project(project_dir):
|
||||
join(util.get_source_dir(), "projectconftpl.ini"),
|
||||
join(project_dir, "platformio.ini"))
|
||||
|
||||
lib_dir = join(project_dir, "lib")
|
||||
src_dir = join(project_dir, "src")
|
||||
config = util.load_project_config(project_dir)
|
||||
if config.has_option("platformio", "src_dir"):
|
||||
src_dir = join(project_dir, config.get("platformio", "src_dir"))
|
||||
|
||||
for d in (src_dir, lib_dir):
|
||||
if not isdir(d):
|
||||
makedirs(d)
|
||||
with util.cd(project_dir):
|
||||
lib_dir = util.get_projectlib_dir()
|
||||
src_dir = util.get_projectsrc_dir()
|
||||
for d in (src_dir, lib_dir):
|
||||
if not isdir(d):
|
||||
makedirs(d)
|
||||
|
||||
init_lib_readme(lib_dir)
|
||||
init_ci_conf(project_dir)
|
||||
@@ -168,16 +165,21 @@ The source code of each library should be placed in separate directory, like
|
||||
For example, see how can be organized `Foo` and `Bar` libraries:
|
||||
|
||||
|--lib
|
||||
| |
|
||||
| |--Bar
|
||||
| | |--docs
|
||||
| | |--examples
|
||||
| | |--src
|
||||
| | |- Bar.c
|
||||
| | |- Bar.h
|
||||
| | |- library.json (optional, custom build options, etc) http://docs.platformio.org/page/librarymanager/config.html
|
||||
| |
|
||||
| |--Foo
|
||||
| | |- Foo.c
|
||||
| | |- Foo.h
|
||||
| |
|
||||
| |- readme.txt --> THIS FILE
|
||||
|
|
||||
|- platformio.ini
|
||||
|--src
|
||||
|- main.c
|
||||
@@ -238,6 +240,7 @@ def init_ci_conf(project_dir):
|
||||
#
|
||||
# install:
|
||||
# - pip install -U platformio
|
||||
# - platformio update
|
||||
#
|
||||
# script:
|
||||
# - platformio run
|
||||
@@ -263,6 +266,7 @@ def init_ci_conf(project_dir):
|
||||
#
|
||||
# install:
|
||||
# - pip install -U platformio
|
||||
# - platformio update
|
||||
#
|
||||
# script:
|
||||
# - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N
|
||||
@@ -298,7 +302,8 @@ def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
|
||||
config = util.load_project_config(project_dir)
|
||||
for section in config.sections():
|
||||
cond = [
|
||||
section.startswith("env:"), config.has_option(section, "board")
|
||||
section.startswith("env:"),
|
||||
config.has_option(section, "board")
|
||||
]
|
||||
if all(cond):
|
||||
used_boards.append(config.get(section, "board"))
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -12,14 +12,17 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-branches, too-many-locals
|
||||
|
||||
import json
|
||||
from os.path import join
|
||||
from time import sleep
|
||||
import time
|
||||
from os.path import isdir, join
|
||||
from urllib import quote
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.managers.lib import LibraryManager, get_builtin_libs
|
||||
from platformio.util import get_api_result
|
||||
|
||||
|
||||
@@ -28,8 +31,7 @@ from platformio.util import get_api_result
|
||||
"-g",
|
||||
"--global",
|
||||
is_flag=True,
|
||||
help="Manage global PlatformIO"
|
||||
" library storage `%s`" % join(util.get_home_dir(), "lib"))
|
||||
help="Manage global PlatformIO library storage")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--storage-dir",
|
||||
@@ -43,8 +45,9 @@ from platformio.util import get_api_result
|
||||
help="Manage custom library storage")
|
||||
@click.pass_context
|
||||
def cli(ctx, **options):
|
||||
non_storage_cmds = ("search", "show", "register", "stats", "builtin")
|
||||
# skip commands that don't need storage folder
|
||||
if ctx.invoked_subcommand in ("search", "register") or \
|
||||
if ctx.invoked_subcommand in non_storage_cmds or \
|
||||
(len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")):
|
||||
return
|
||||
storage_dir = options['storage_dir']
|
||||
@@ -60,6 +63,9 @@ def cli(ctx, **options):
|
||||
"Please use `platformio lib --global %s` command to remove "
|
||||
"this warning." % ctx.invoked_subcommand,
|
||||
fg="yellow")
|
||||
elif util.is_platformio_project(storage_dir):
|
||||
with util.cd(storage_dir):
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
|
||||
if not storage_dir and not util.is_platformio_project():
|
||||
raise exception.NotGlobalLibDir(util.get_project_dir(),
|
||||
@@ -84,11 +90,17 @@ def cli(ctx, **options):
|
||||
"--interactive",
|
||||
is_flag=True,
|
||||
help="Allow to make a choice for all prompts")
|
||||
@click.option(
|
||||
"-f",
|
||||
"--force",
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload library if exists")
|
||||
@click.pass_obj
|
||||
def lib_install(lm, libraries, silent, interactive):
|
||||
# @TODO "save" option
|
||||
def lib_install(lm, libraries, silent, interactive, force):
|
||||
# @TODO: "save" option
|
||||
for library in libraries:
|
||||
lm.install(library, silent=silent, interactive=interactive)
|
||||
lm.install(
|
||||
library, silent=silent, interactive=interactive, force=force)
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Uninstall libraries")
|
||||
@@ -106,60 +118,75 @@ def lib_uninstall(lm, libraries):
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for new version")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_obj
|
||||
def lib_update(lm, libraries, only_check):
|
||||
def lib_update(lm, libraries, only_check, json_output):
|
||||
if not libraries:
|
||||
libraries = [str(m.get("id", m['name'])) for m in lm.get_installed()]
|
||||
for library in libraries:
|
||||
lm.update(library, only_check=only_check)
|
||||
libraries = [manifest['__pkg_dir'] for manifest in lm.get_installed()]
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for library in libraries:
|
||||
pkg_dir = library if isdir(library) else None
|
||||
requirements = None
|
||||
url = None
|
||||
if not pkg_dir:
|
||||
name, requirements, url = lm.parse_pkg_uri(library)
|
||||
pkg_dir = lm.get_package_dir(name, requirements, url)
|
||||
if not pkg_dir:
|
||||
continue
|
||||
latest = lm.outdated(pkg_dir, requirements)
|
||||
if not latest:
|
||||
continue
|
||||
manifest = lm.load_manifest(pkg_dir)
|
||||
manifest['versionLatest'] = latest
|
||||
result.append(manifest)
|
||||
return click.echo(json.dumps(result))
|
||||
else:
|
||||
for library in libraries:
|
||||
lm.update(library, only_check=only_check)
|
||||
|
||||
return True
|
||||
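With both --only-check and --json-output, lib_update above prints a JSON array of installed-library manifests extended with a versionLatest field. A small consumer sketch (the field names follow the diff; the subprocess invocation is an assumption about how you would drive the CLI from a script):

import json
import subprocess

# Hypothetical invocation; adjust the executable name to your installation.
raw = subprocess.check_output(
    ["platformio", "lib", "update", "--only-check", "--json-output"])
for manifest in json.loads(raw.decode("utf-8")):
    # "name"/"version" come from the stored manifest; "versionLatest" is the
    # field lib_update() adds when a newer release is available.
    print("%s: %s -> %s" % (manifest["name"], manifest.get("version", "?"),
                            manifest["versionLatest"]))
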
|
||||
|
||||
#######
|
||||
def print_lib_item(item):
|
||||
click.secho(item['name'], fg="cyan")
|
||||
click.echo("=" * len(item['name']))
|
||||
if "id" in item:
|
||||
click.secho("#ID: %d" % item['id'], bold=True)
|
||||
if "description" in item or "url" in item:
|
||||
click.echo(item.get("description", item.get("url", "")))
|
||||
click.echo()
|
||||
|
||||
LIBLIST_TPL = ("[{id:^14}] {name:<25} {compatibility:<30} "
|
||||
"\"{authornames}\": {description}")
|
||||
for key in ("version", "homepage", "license", "keywords"):
|
||||
if key not in item or not item[key]:
|
||||
continue
|
||||
if isinstance(item[key], list):
|
||||
click.echo("%s: %s" % (key.title(), ", ".join(item[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.title(), item[key]))
|
||||
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in item:
|
||||
continue
|
||||
click.echo("Compatible %s: %s" % (key, ", ".join(
|
||||
[i['title'] if isinstance(i, dict) else i for i in item[key]])))
|
||||
|
||||
if "authors" in item or "authornames" in item:
|
||||
click.echo("Authors: %s" % ", ".join(
|
||||
item.get("authornames",
|
||||
[a.get("name", "") for a in item.get("authors", [])])))
|
||||
|
||||
if "__src_url" in item:
|
||||
click.secho("Source: %s" % item['__src_url'])
|
||||
click.echo()
|
||||
|
||||
|
||||
def echo_liblist_header():
|
||||
click.echo(
|
||||
LIBLIST_TPL.format(
|
||||
id=click.style(
|
||||
"ID", fg="green"),
|
||||
name=click.style(
|
||||
"Name", fg="cyan"),
|
||||
compatibility=click.style(
|
||||
"Compatibility", fg="yellow"),
|
||||
authornames="Authors",
|
||||
description="Description"))
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
|
||||
def echo_liblist_item(item):
|
||||
description = item.get("description", item.get("url", "")).encode("utf-8")
|
||||
if "version" in item:
|
||||
description += " | @" + click.style(item['version'], fg="yellow")
|
||||
|
||||
click.echo(
|
||||
LIBLIST_TPL.format(
|
||||
id=click.style(
|
||||
str(item.get("id", "-")), fg="green"),
|
||||
name=click.style(
|
||||
item['name'], fg="cyan"),
|
||||
compatibility=click.style(
|
||||
", ".join(
|
||||
item.get("frameworks", ["-"]) + item.get("platforms", [])),
|
||||
fg="yellow"),
|
||||
authornames=", ".join(item.get("authornames", ["Unknown"])).encode(
|
||||
"utf-8"),
|
||||
description=description))
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for library")
|
||||
@cli.command("search", short_help="Search for a library")
|
||||
@click.argument("query", required=False, nargs=-1)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.option("--page", type=click.INT, default=1)
|
||||
@click.option("--id", multiple=True)
|
||||
@click.option("-n", "--name", multiple=True)
|
||||
@click.option("-a", "--author", multiple=True)
|
||||
@click.option("-k", "--keyword", multiple=True)
|
||||
@@ -181,10 +208,9 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
query.append('%s:"%s"' % (key, value))
|
||||
|
||||
result = get_api_result(
|
||||
"/lib/search",
|
||||
dict(
|
||||
query=" ".join(query), page=page),
|
||||
cache_valid="3d")
|
||||
"/v2/lib/search",
|
||||
dict(query=" ".join(query), page=page),
|
||||
cache_valid="1d")
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(result))
|
||||
@@ -210,15 +236,12 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
"Found %d libraries:\n" % result['total'],
|
||||
fg="green" if result['total'] else "yellow")
|
||||
|
||||
if result['total']:
|
||||
echo_liblist_header()
|
||||
|
||||
while True:
|
||||
for item in result['items']:
|
||||
echo_liblist_item(item)
|
||||
print_lib_item(item)
|
||||
|
||||
if (int(result['page']) * int(result['perpage']) >=
|
||||
int(result['total'])):
|
||||
if (int(result['page']) * int(result['perpage']) >= int(
|
||||
result['total'])):
|
||||
break
|
||||
|
||||
if noninteractive:
|
||||
@@ -228,14 +251,15 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
result['perpage'],
|
||||
fg="yellow")
|
||||
click.echo()
|
||||
sleep(5)
|
||||
time.sleep(5)
|
||||
elif not click.confirm("Show next libraries?"):
|
||||
break
|
||||
result = get_api_result(
|
||||
"/lib/search",
|
||||
dict(
|
||||
query=" ".join(query), page=int(result['page']) + 1),
|
||||
cache_valid="3d")
|
||||
"/v2/lib/search", {
|
||||
"query": " ".join(query),
|
||||
"page": int(result['page']) + 1
|
||||
},
|
||||
cache_valid="1d")
|
||||
|
||||
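The search loop above pages through /v2/lib/search until page * perpage reaches total. The same pagination logic reduced to a standalone sketch, where the fetch callable is a stand-in for util.get_api_result:

def iter_search_results(fetch, query):
    """Yield library items page by page, mirroring the stop condition above.

    `fetch(query, page)` is assumed to return a dict with the same fields the
    registry responses use in the diff: items, page, perpage, total.
    """
    page = 1
    while True:
        result = fetch(query, page)
        for item in result["items"]:
            yield item
        if int(result["page"]) * int(result["perpage"]) >= int(result["total"]):
            break
        page = int(result["page"]) + 1


# Tiny fake fetcher to show the control flow (2 items per page, 3 items total).
def fake_fetch(query, page):
    data = ["a", "b", "c"]
    return {"items": data[(page - 1) * 2:page * 2],
            "page": page, "perpage": 2, "total": len(data)}

print(list(iter_search_results(fake_fetch, "query")))  # -> ['a', 'b', 'c']
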
|
||||
@cli.command("list", short_help="List installed libraries")
|
||||
@@ -245,41 +269,78 @@ def lib_list(lm, json_output):
|
||||
items = lm.get_installed()
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(items))
|
||||
return
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
if not items:
|
||||
return
|
||||
return None
|
||||
|
||||
echo_liblist_header()
|
||||
for item in sorted(items, key=lambda i: i['name']):
|
||||
if "authors" in item:
|
||||
item['authornames'] = [i['name'] for i in item['authors']]
|
||||
echo_liblist_item(item)
|
||||
print_lib_item(item)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show details about installed library")
|
||||
@click.pass_obj
|
||||
@cli.command("builtin", short_help="List built-in libraries")
|
||||
@click.option("--storage", multiple=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_builtin(storage, json_output):
|
||||
items = get_builtin_libs(storage)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
for storage_ in items:
|
||||
if not storage_['items']:
|
||||
continue
|
||||
click.secho(storage_['name'], fg="green")
|
||||
click.echo("*" * len(storage_['name']))
|
||||
click.echo()
|
||||
|
||||
for item in sorted(storage_['items'], key=lambda i: i['name']):
|
||||
print_lib_item(item)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show detailed info about a library")
|
||||
@click.argument("library", metavar="[LIBRARY]")
|
||||
def lib_show(lm, library): # pylint: disable=too-many-branches
|
||||
name, requirements, url = lm.parse_pkg_name(library)
|
||||
package_dir = lm.get_package_dir(name, requirements, url)
|
||||
if not package_dir:
|
||||
click.secho(
|
||||
"%s @ %s is not installed" % (name, requirements or "*"),
|
||||
fg="yellow")
|
||||
return
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_show(library, json_output):
|
||||
lm = LibraryManager()
|
||||
name, requirements, _ = lm.parse_pkg_uri(library)
|
||||
lib_id = lm.search_lib_id(
|
||||
{
|
||||
"name": name,
|
||||
"requirements": requirements
|
||||
},
|
||||
silent=json_output,
|
||||
interactive=not json_output)
|
||||
lib = get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
|
||||
if json_output:
|
||||
return click.echo(json.dumps(lib))
|
||||
|
||||
manifest = lm.load_manifest(package_dir)
|
||||
|
||||
click.secho(manifest['name'], fg="cyan")
|
||||
click.echo("=" * len(manifest['name']))
|
||||
if "description" in manifest:
|
||||
click.echo(manifest['description'])
|
||||
click.secho(lib['name'], fg="cyan")
|
||||
click.echo("=" * len(lib['name']))
|
||||
click.secho("#ID: %d" % lib['id'], bold=True)
|
||||
click.echo(lib['description'])
|
||||
click.echo()
|
||||
|
||||
click.echo(
|
||||
"Version: %s, released %s" %
|
||||
(lib['version']['name'],
|
||||
time.strftime("%c", util.parse_date(lib['version']['released']))))
|
||||
click.echo("Manifest: %s" % lib['confurl'])
|
||||
for key in ("homepage", "repository", "license"):
|
||||
if key not in lib or not lib[key]:
|
||||
continue
|
||||
if isinstance(lib[key], list):
|
||||
click.echo("%s: %s" % (key.title(), ", ".join(lib[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.title(), lib[key]))
|
||||
|
||||
blocks = []
|
||||
|
||||
_authors = []
|
||||
for author in manifest.get("authors", []):
|
||||
for author in lib.get("authors", []):
|
||||
_data = []
|
||||
for key in ("name", "email", "url", "maintainer"):
|
||||
if not author[key]:
|
||||
@@ -292,23 +353,41 @@ def lib_show(lm, library): # pylint: disable=too-many-branches
|
||||
_data.append(author[key])
|
||||
_authors.append(" ".join(_data))
|
||||
if _authors:
|
||||
click.echo("Authors: %s" % ", ".join(_authors))
|
||||
blocks.append(("Authors", _authors))
|
||||
|
||||
for key in ("keywords", "frameworks", "platforms", "license", "url",
|
||||
"version"):
|
||||
if key not in manifest:
|
||||
blocks.append(("Keywords", lib['keywords']))
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in lib or not lib[key]:
|
||||
continue
|
||||
if isinstance(manifest[key], list):
|
||||
click.echo("%s: %s" % (key.title(), ", ".join(manifest[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.title(), manifest[key]))
|
||||
blocks.append(("Compatible %s" % key, [i['title'] for i in lib[key]]))
|
||||
blocks.append(("Headers", lib['headers']))
|
||||
blocks.append(("Examples", lib['examples']))
|
||||
blocks.append(("Versions", [
|
||||
"%s, released %s" %
|
||||
(v['name'], time.strftime("%c", util.parse_date(v['released'])))
|
||||
for v in lib['versions']
|
||||
]))
|
||||
blocks.append(("Unique Downloads", [
|
||||
"Today: %s" % lib['dlstats']['day'],
|
||||
"Week: %s" % lib['dlstats']['week'],
|
||||
"Month: %s" % lib['dlstats']['month']
|
||||
]))
|
||||
|
||||
for (title, rows) in blocks:
|
||||
click.echo()
|
||||
click.secho(title, bold=True)
|
||||
click.echo("-" * len(title))
|
||||
for row in rows:
|
||||
click.echo(row)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("register", short_help="Register new library")
|
||||
@cli.command("register", short_help="Register a new library")
|
||||
@click.argument("config_url")
|
||||
def lib_register(config_url):
|
||||
if (not config_url.startswith("http://") and
|
||||
not config_url.startswith("https://")):
|
||||
if (not config_url.startswith("http://")
|
||||
and not config_url.startswith("https://")):
|
||||
raise exception.InvalidLibConfURL(config_url)
|
||||
|
||||
result = get_api_result("/lib/register", data=dict(config_url=config_url))
|
||||
@@ -317,3 +396,78 @@ def lib_register(config_url):
|
||||
result['message'],
|
||||
fg="green"
|
||||
if "successed" in result and result['successed'] else "red")
|
||||
|
||||
|
||||
@cli.command("stats", short_help="Library Registry Statistics")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_stats(json_output):
|
||||
result = get_api_result("/lib/stats", cache_valid="1h")
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
printitem_tpl = "{name:<33} {url}"
|
||||
printitemdate_tpl = "{name:<33} {date:23} {url}"
|
||||
|
||||
def _print_title(title):
|
||||
click.secho(title.upper(), bold=True)
|
||||
click.echo("*" * len(title))
|
||||
|
||||
def _print_header(with_date=False):
|
||||
click.echo((printitemdate_tpl if with_date else printitem_tpl).format(
|
||||
name=click.style("Name", fg="cyan"),
|
||||
date="Date",
|
||||
url=click.style("Url", fg="blue")))
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
def _print_lib_item(item):
|
||||
click.echo((printitemdate_tpl
|
||||
if "date" in item else printitem_tpl).format(
|
||||
name=click.style(item['name'], fg="cyan"),
|
||||
date=str(
|
||||
time.strftime("%c", util.parse_date(item['date']))
|
||||
if "date" in item else ""),
|
||||
url=click.style(
|
||||
"https://platformio.org/lib/show/%s/%s" %
|
||||
(item['id'], quote(item['name'])),
|
||||
fg="blue")))
|
||||
|
||||
def _print_tag_item(name):
|
||||
click.echo(
|
||||
printitem_tpl.format(
|
||||
name=click.style(name, fg="cyan"),
|
||||
url=click.style(
|
||||
"https://platformio.org/lib/search?query=" + quote(
|
||||
"keyword:%s" % name),
|
||||
fg="blue")))
|
||||
|
||||
for key in ("updated", "added"):
|
||||
_print_title("Recently " + key)
|
||||
_print_header(with_date=True)
|
||||
for item in result.get(key, []):
|
||||
_print_lib_item(item)
|
||||
click.echo()
|
||||
|
||||
_print_title("Recent keywords")
|
||||
_print_header(with_date=False)
|
||||
for item in result.get("lastkeywords"):
|
||||
_print_tag_item(item)
|
||||
click.echo()
|
||||
|
||||
_print_title("Popular keywords")
|
||||
_print_header(with_date=False)
|
||||
for item in result.get("topkeywords"):
|
||||
_print_tag_item(item)
|
||||
click.echo()
|
||||
|
||||
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth",
|
||||
"Month")):
|
||||
_print_title("Featured: " + title)
|
||||
_print_header(with_date=False)
|
||||
for item in result.get(key, []):
|
||||
_print_lib_item(item)
|
||||
click.echo()
|
||||
|
||||
return True
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -13,10 +13,12 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
from os.path import dirname, isdir
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio import app, exception, util
|
||||
from platformio.commands.boards import print_boards
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
|
||||
|
||||
@@ -28,40 +30,153 @@ def cli():
|
||||
def _print_platforms(platforms):
|
||||
for platform in platforms:
|
||||
click.echo("{name} ~ {title}".format(
|
||||
name=click.style(
|
||||
platform['name'], fg="cyan"),
|
||||
name=click.style(platform['name'], fg="cyan"),
|
||||
title=platform['title']))
|
||||
click.echo("=" * (3 + len(platform['name'] + platform['title'])))
|
||||
click.echo(platform['description'])
|
||||
click.echo()
|
||||
click.echo("Home: %s" % "http://platformio.org/platforms/" + platform[
|
||||
'name'])
|
||||
if platform['packages']:
|
||||
if "homepage" in platform:
|
||||
click.echo("Home: %s" % platform['homepage'])
|
||||
if "frameworks" in platform and platform['frameworks']:
|
||||
click.echo("Frameworks: %s" % ", ".join(platform['frameworks']))
|
||||
if "packages" in platform:
|
||||
click.echo("Packages: %s" % ", ".join(platform['packages']))
|
||||
if "version" in platform:
|
||||
click.echo("Version: " + platform['version'])
|
||||
click.echo()
|
||||
|
||||
|
||||
def _get_registry_platforms():
|
||||
platforms = util.get_api_result("/platforms", cache_valid="7d")
|
||||
pm = PlatformManager()
|
||||
for platform in platforms or []:
|
||||
platform['versions'] = pm.get_all_repo_versions(platform['name'])
|
||||
return platforms
|
||||
|
||||
|
||||
def _original_version(version):
    if version.count(".") != 2:
        return None
    _, y = version.split(".")[:2]
    if int(y) < 100:
        return None
    if len(y) % 2 != 0:
        y = "0" + y
    parts = [str(int(y[i * 2:i * 2 + 2])) for i in range(len(y) / 2)]
    return ".".join(parts)
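The helper above unpacks the two-digit-per-segment encoding that platform packages use for their middle version component, so 1.30201.0 carries the upstream 3.2.1. A quick check as a Python 3 friendly rewrite (the original uses Python 2's "/" integer division):

def decode_packed_version(version):
    # Same algorithm as _original_version() above, with "//" so it also runs
    # on Python 3; returns None when there is nothing to unpack.
    if version.count(".") != 2:
        return None
    _, y = version.split(".")[:2]
    if int(y) < 100:
        return None
    if len(y) % 2 != 0:
        y = "0" + y
    return ".".join(str(int(y[i * 2:i * 2 + 2])) for i in range(len(y) // 2))

print(decode_packed_version("1.30201.0"))  # -> "3.2.1"
print(decode_packed_version("1.5.0"))      # -> None (middle part < 100)
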
|
||||
|
||||
def _get_platform_data(*args, **kwargs):
|
||||
try:
|
||||
return _get_installed_platform_data(*args, **kwargs)
|
||||
except exception.UnknownPlatform:
|
||||
return _get_registry_platform_data(*args, **kwargs)
|
||||
|
||||
|
||||
def _get_installed_platform_data(platform,
|
||||
with_boards=True,
|
||||
expose_packages=True):
|
||||
p = PlatformFactory.newPlatform(platform)
|
||||
data = dict(
|
||||
name=p.name,
|
||||
title=p.title,
|
||||
description=p.description,
|
||||
version=p.version,
|
||||
homepage=p.homepage,
|
||||
repository=p.repository_url,
|
||||
url=p.vendor_url,
|
||||
docs=p.docs_url,
|
||||
license=p.license,
|
||||
forDesktop=not p.is_embedded(),
|
||||
frameworks=sorted(p.frameworks.keys() if p.frameworks else []),
|
||||
packages=p.packages.keys() if p.packages else [])
|
||||
|
||||
# if dump to API
|
||||
# del data['version']
|
||||
# return data
|
||||
|
||||
# overwrite VCS version and add extra fields
|
||||
manifest = PlatformManager().load_manifest(dirname(p.manifest_path))
|
||||
assert manifest
|
||||
for key in manifest:
|
||||
if key == "version" or key.startswith("__"):
|
||||
data[key] = manifest[key]
|
||||
|
||||
if with_boards:
|
||||
data['boards'] = [c.get_brief_data() for c in p.get_boards().values()]
|
||||
|
||||
if not data['packages'] or not expose_packages:
|
||||
return data
|
||||
|
||||
data['packages'] = []
|
||||
installed_pkgs = p.get_installed_packages()
|
||||
for name, opts in p.packages.items():
|
||||
item = dict(
|
||||
name=name,
|
||||
type=p.get_package_type(name),
|
||||
requirements=opts.get("version"),
|
||||
optional=opts.get("optional") is True)
|
||||
if name in installed_pkgs:
|
||||
for key, value in installed_pkgs[name].items():
|
||||
if key not in ("url", "version", "description"):
|
||||
continue
|
||||
item[key] = value
|
||||
if key == "version":
|
||||
item["originalVersion"] = _original_version(value)
|
||||
data['packages'].append(item)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def _get_registry_platform_data( # pylint: disable=unused-argument
|
||||
platform,
|
||||
with_boards=True,
|
||||
expose_packages=True):
|
||||
_data = None
|
||||
for p in _get_registry_platforms():
|
||||
if p['name'] == platform:
|
||||
_data = p
|
||||
break
|
||||
|
||||
if not _data:
|
||||
return None
|
||||
|
||||
data = dict(
|
||||
name=_data['name'],
|
||||
title=_data['title'],
|
||||
description=_data['description'],
|
||||
homepage=_data['homepage'],
|
||||
repository=_data['repository'],
|
||||
url=_data['url'],
|
||||
license=_data['license'],
|
||||
forDesktop=_data['forDesktop'],
|
||||
frameworks=_data['frameworks'],
|
||||
packages=_data['packages'],
|
||||
versions=_data['versions'])
|
||||
|
||||
if with_boards:
|
||||
data['boards'] = [
|
||||
board for board in PlatformManager().get_registered_boards()
|
||||
if board['platform'] == _data['name']
|
||||
]
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for development platform")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_search(query, json_output):
|
||||
platforms = []
|
||||
for platform in util.get_api_result("/platforms", cache_valid="365d"):
|
||||
for platform in _get_registry_platforms():
|
||||
if query == "all":
|
||||
query = ""
|
||||
|
||||
search_data = json.dumps(platform)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
|
||||
platforms.append({
|
||||
"name": platform['name'],
|
||||
"title": platform['title'],
|
||||
"description": platform['description'],
|
||||
"packages": platform['packages']
|
||||
})
|
||||
platforms.append(
|
||||
_get_registry_platform_data(
|
||||
platform['name'], with_boards=False, expose_packages=False))
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
@@ -69,20 +184,133 @@ def platform_search(query, json_output):
|
||||
_print_platforms(platforms)
|
||||
|
||||
|
||||
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_frameworks(query, json_output):
|
||||
frameworks = []
|
||||
for framework in util.get_api_result("/frameworks", cache_valid="7d"):
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = json.dumps(framework)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
framework['homepage'] = (
|
||||
"https://platformio.org/frameworks/" + framework['name'])
|
||||
framework['platforms'] = [
|
||||
platform['name'] for platform in _get_registry_platforms()
|
||||
if framework['name'] in platform['frameworks']
|
||||
]
|
||||
frameworks.append(framework)
|
||||
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest['name'])
|
||||
if json_output:
|
||||
click.echo(json.dumps(frameworks))
|
||||
else:
|
||||
_print_platforms(frameworks)
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed development platforms")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_list(json_output):
|
||||
platforms = []
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
platforms.append(
|
||||
_get_installed_platform_data(
|
||||
manifest['__pkg_dir'],
|
||||
with_boards=False,
|
||||
expose_packages=False))
|
||||
|
||||
platforms = sorted(platforms, key=lambda manifest: manifest['name'])
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show details about development platform")
|
||||
@click.argument("platform")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_show(platform, json_output): # pylint: disable=too-many-branches
|
||||
data = _get_platform_data(platform)
|
||||
if not data:
|
||||
raise exception.UnknownPlatform(platform)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(data))
|
||||
|
||||
click.echo("{name} ~ {title}".format(
|
||||
name=click.style(data['name'], fg="cyan"), title=data['title']))
|
||||
click.echo("=" * (3 + len(data['name'] + data['title'])))
|
||||
click.echo(data['description'])
|
||||
click.echo()
|
||||
if "version" in data:
|
||||
click.echo("Version: %s" % data['version'])
|
||||
if data['homepage']:
|
||||
click.echo("Home: %s" % data['homepage'])
|
||||
if data['repository']:
|
||||
click.echo("Repository: %s" % data['repository'])
|
||||
if data['url']:
|
||||
click.echo("Vendor: %s" % data['url'])
|
||||
if data['license']:
|
||||
click.echo("License: %s" % data['license'])
|
||||
if data['frameworks']:
|
||||
click.echo("Frameworks: %s" % ", ".join(data['frameworks']))
|
||||
|
||||
if not data['packages']:
|
||||
return None
|
||||
|
||||
if not isinstance(data['packages'][0], dict):
|
||||
click.echo("Packages: %s" % ", ".join(data['packages']))
|
||||
else:
|
||||
click.echo()
|
||||
click.secho("Packages", bold=True)
|
||||
click.echo("--------")
|
||||
for item in data['packages']:
|
||||
click.echo()
|
||||
click.echo("Package %s" % click.style(item['name'], fg="yellow"))
|
||||
click.echo("-" * (8 + len(item['name'])))
|
||||
if item['type']:
|
||||
click.echo("Type: %s" % item['type'])
|
||||
click.echo("Requirements: %s" % item['requirements'])
|
||||
click.echo("Installed: %s" % ("Yes" if item.get("version") else
|
||||
"No (optional)"))
|
||||
if "version" in item:
|
||||
click.echo("Version: %s" % item['version'])
|
||||
if "originalVersion" in item:
|
||||
click.echo("Original version: %s" % item['originalVersion'])
|
||||
if "description" in item:
|
||||
click.echo("Description: %s" % item['description'])
|
||||
|
||||
if data['boards']:
|
||||
click.echo()
|
||||
click.secho("Boards", bold=True)
|
||||
click.echo("------")
|
||||
print_boards(data['boards'])
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("install", short_help="Install new development platform")
|
||||
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
|
||||
@click.option("--with-package", multiple=True)
|
||||
@click.option("--without-package", multiple=True)
|
||||
@click.option("--skip-default-package", is_flag=True)
|
||||
@click.option(
|
||||
"-f",
|
||||
"--force",
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload dev/platform and its packages if exist")
|
||||
def platform_install(platforms, with_package, without_package,
|
||||
skip_default_package):
|
||||
skip_default_package, force):
|
||||
pm = PlatformManager()
|
||||
for platform in platforms:
|
||||
if pm.install(
|
||||
name=platform,
|
||||
with_packages=with_package,
|
||||
without_packages=without_package,
|
||||
skip_default_package=skip_default_package):
|
||||
skip_default_package=skip_default_package,
|
||||
force=force):
|
||||
click.secho(
|
||||
"The platform '%s' has been successfully installed!\n"
|
||||
"The rest of packages will be installed automatically "
|
||||
@@ -108,99 +336,53 @@ def platform_uninstall(platforms):
|
||||
"-p",
|
||||
"--only-packages",
|
||||
is_flag=True,
|
||||
help="Update only platform packages")
|
||||
help="Update only the platform packages")
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for new version")
|
||||
def platform_update(platforms, only_packages, only_check):
|
||||
pm = PlatformManager()
|
||||
if not platforms:
|
||||
platforms = set([m['name'] for m in pm.get_installed()])
|
||||
for platform in platforms:
|
||||
click.echo("Platform %s" % click.style(platform, fg="cyan"))
|
||||
click.echo("--------")
|
||||
pm.update(platform, only_packages=only_packages, only_check=only_check)
|
||||
click.echo()
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed development platforms")
|
||||
help="Do not update, only check for a new version")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_list(json_output):
|
||||
platforms = []
|
||||
def platform_update(platforms, only_packages, only_check, json_output):
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
p = PlatformFactory.newPlatform(
|
||||
pm.get_manifest_path(manifest['__pkg_dir']))
|
||||
platforms.append({
|
||||
"name": p.name,
|
||||
"title": p.title,
|
||||
"description": p.description,
|
||||
"version": p.version,
|
||||
"url": p.vendor_url,
|
||||
"packages": p.get_installed_packages().keys(),
|
||||
'forDesktop': any([
|
||||
p.name.startswith(n) for n in ("native", "linux", "windows")
|
||||
])
|
||||
})
|
||||
pkg_dir_to_name = {}
|
||||
if not platforms:
|
||||
platforms = []
|
||||
for manifest in pm.get_installed():
|
||||
platforms.append(manifest['__pkg_dir'])
|
||||
pkg_dir_to_name[manifest['__pkg_dir']] = manifest.get(
|
||||
"title", manifest['name'])
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for platform in platforms:
|
||||
pkg_dir = platform if isdir(platform) else None
|
||||
requirements = None
|
||||
url = None
|
||||
if not pkg_dir:
|
||||
name, requirements, url = pm.parse_pkg_uri(platform)
|
||||
pkg_dir = pm.get_package_dir(name, requirements, url)
|
||||
if not pkg_dir:
|
||||
continue
|
||||
latest = pm.outdated(pkg_dir, requirements)
|
||||
if (not latest and not PlatformFactory.newPlatform(pkg_dir)
|
||||
.are_outdated_packages()):
|
||||
continue
|
||||
data = _get_installed_platform_data(
|
||||
pkg_dir, with_boards=False, expose_packages=False)
|
||||
if latest:
|
||||
data['versionLatest'] = latest
|
||||
result.append(data)
|
||||
return click.echo(json.dumps(result))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
# cleanup cached board and platform lists
|
||||
app.clean_cache()
|
||||
for platform in platforms:
|
||||
click.echo("Platform %s" % click.style(
|
||||
pkg_dir_to_name.get(platform, platform), fg="cyan"))
|
||||
click.echo("--------")
|
||||
pm.update(
|
||||
platform, only_packages=only_packages, only_check=only_check)
|
||||
click.echo()
|
||||
|
||||
|
||||
@cli.command("show", short_help="Show details about installed platform")
|
||||
@click.argument("platform")
|
||||
def platform_show(platform):
|
||||
|
||||
def _detail_version(version):
|
||||
if version.count(".") != 2:
|
||||
return version
|
||||
_, y = version.split(".")[:2]
|
||||
if int(y) < 100:
|
||||
return version
|
||||
if len(y) % 2 != 0:
|
||||
y = "0" + y
|
||||
parts = [str(int(y[i * 2:i * 2 + 2])) for i in range(len(y) / 2)]
|
||||
return "%s (%s)" % (version, ".".join(parts))
|
||||
|
||||
try:
|
||||
p = PlatformFactory.newPlatform(platform)
|
||||
except exception.UnknownPlatform:
|
||||
raise exception.PlatformNotInstalledYet(platform)
|
||||
|
||||
click.echo("{name} ~ {title}".format(
|
||||
name=click.style(
|
||||
p.name, fg="cyan"), title=p.title))
|
||||
click.echo("=" * (3 + len(p.name + p.title)))
|
||||
click.echo(p.description)
|
||||
click.echo()
|
||||
click.echo("Version: %s" % p.version)
|
||||
if p.homepage:
|
||||
click.echo("Home: %s" % p.homepage)
|
||||
if p.license:
|
||||
click.echo("License: %s" % p.license)
|
||||
if p.frameworks:
|
||||
click.echo("Frameworks: %s" % ", ".join(p.frameworks.keys()))
|
||||
|
||||
if not p.packages:
|
||||
return
|
||||
|
||||
installed_pkgs = p.get_installed_packages()
|
||||
for name, opts in p.packages.items():
|
||||
click.echo()
|
||||
click.echo("Package %s" % click.style(name, fg="yellow"))
|
||||
click.echo("-" * (8 + len(name)))
|
||||
if p.get_package_type(name):
|
||||
click.echo("Type: %s" % p.get_package_type(name))
|
||||
click.echo("Requirements: %s" % opts.get("version"))
|
||||
click.echo("Installed: %s" % ("Yes" if name in installed_pkgs else
|
||||
"No (optional)"))
|
||||
if name in installed_pkgs:
|
||||
for key, value in installed_pkgs[name].items():
|
||||
if key in ("url", "version", "description"):
|
||||
if key == "version":
|
||||
value = _detail_version(value)
|
||||
click.echo("%s: %s" % (key.title(), value))
|
||||
return True
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -23,7 +23,7 @@ import click
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.commands.device import device_monitor as cmd_device_monitor
|
||||
from platformio.pioplus import pioplus_call
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
@@ -147,12 +147,12 @@ def device_list(json_output):
|
||||
@click.option(
|
||||
"--rts",
|
||||
default=None,
|
||||
type=click.Choice(["0", "1"]),
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state")
|
||||
@click.option(
|
||||
"--dtr",
|
||||
default=None,
|
||||
type=click.Choice(["0", "1"]),
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state")
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
|
||||
@click.option(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -12,7 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from datetime import datetime
|
||||
from hashlib import sha1
|
||||
from os import getcwd, makedirs, walk
|
||||
from os.path import getmtime, isdir, isfile, join
|
||||
@@ -21,10 +20,11 @@ from time import time
|
||||
import click
|
||||
|
||||
from platformio import __version__, exception, telemetry, util
|
||||
from platformio.commands.device import device_monitor as cmd_device_monitor
|
||||
from platformio.commands.lib import lib_install as cmd_lib_install
|
||||
from platformio.commands.platform import \
|
||||
platform_install as cmd_platform_install
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.managers.lib import LibraryManager, is_builtin_lib
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
|
||||
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
|
||||
@@ -58,27 +58,25 @@ def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
|
||||
raise exception.NotPlatformIOProject(project_dir)
|
||||
|
||||
with util.cd(project_dir):
|
||||
# clean obsolete .pioenvs dir
|
||||
# clean obsolete build dir
|
||||
if not disable_auto_clean:
|
||||
try:
|
||||
_clean_pioenvs_dir(util.get_projectpioenvs_dir())
|
||||
_clean_build_dir(util.get_projectbuild_dir())
|
||||
except: # pylint: disable=bare-except
|
||||
click.secho(
|
||||
"Can not remove temporary directory `%s`. Please remove "
|
||||
"`.pioenvs` directory from the project manually to avoid "
|
||||
"build issues" % util.get_projectpioenvs_dir(force=True),
|
||||
"it manually to avoid build issues" %
|
||||
util.get_projectbuild_dir(force=True),
|
||||
fg="yellow")
|
||||
|
||||
config = util.load_project_config()
|
||||
check_project_defopts(config)
|
||||
assert check_project_envs(config, environment)
|
||||
|
||||
env_default = None
|
||||
if config.has_option("platformio", "env_default"):
|
||||
env_default = [
|
||||
e.strip()
|
||||
for e in config.get("platformio", "env_default").split(",")
|
||||
]
|
||||
env_default = util.parse_conf_multi_values(
|
||||
config.get("platformio", "env_default"))
|
||||
|
||||
check_project_defopts(config)
|
||||
check_project_envs(config, environment or env_default)
|
||||
|
||||
results = []
|
||||
start_time = time()
|
||||
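env_default can hold several environment names separated by commas or newlines, and the diff swaps the manual comma split for util.parse_conf_multi_values. A stand-in implementation for illustration only (the real helper lives in platformio/util.py and may differ in detail):

def parse_conf_multi_values(items):
    # Hypothetical stand-in: split a ConfigParser value on newlines and commas,
    # strip whitespace, and drop empty entries, which is how the run command
    # consumes env_default and lib_deps above.
    result = []
    for line in (items or "").split("\n"):
        for item in line.split(","):
            item = item.strip()
            if item:
                result.append(item)
    return result

print(parse_conf_multi_values("uno, nodemcuv2\n  megaatmega2560"))
# -> ['uno', 'nodemcuv2', 'megaatmega2560']
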
@@ -88,14 +86,14 @@ def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
|
||||
|
||||
envname = section[4:]
|
||||
skipenv = any([
|
||||
environment and envname not in environment, not environment and
|
||||
env_default and envname not in env_default
|
||||
environment and envname not in environment, not environment
|
||||
and env_default and envname not in env_default
|
||||
])
|
||||
if skipenv:
|
||||
results.append((envname, None))
|
||||
continue
|
||||
|
||||
if results:
|
||||
if not silent and results:
|
||||
click.echo()
|
||||
|
||||
options = {}
|
||||
@@ -106,31 +104,67 @@ def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
|
||||
|
||||
ep = EnvironmentProcessor(ctx, envname, options, target,
|
||||
upload_port, silent, verbose)
|
||||
results.append((envname, ep.process()))
|
||||
result = (envname, ep.process())
|
||||
results.append(result)
|
||||
if result[1] and "monitor" in ep.get_build_targets() and \
|
||||
"nobuild" not in ep.get_build_targets():
|
||||
ctx.invoke(cmd_device_monitor)
|
||||
|
||||
if len(results) > 1:
|
||||
found_error = any(status is False for (_, status) in results)
|
||||
|
||||
if (found_error or not silent) and len(results) > 1:
|
||||
click.echo()
|
||||
print_summary(results, start_time)
|
||||
|
||||
if any([status is False for (_, status) in results]):
|
||||
if found_error:
|
||||
raise exception.ReturnErrorCode(1)
|
||||
return True
|
||||
|
||||
|
||||
class EnvironmentProcessor(object):
|
||||
|
||||
KNOWN_OPTIONS = (
|
||||
"platform", "framework", "board", "board_mcu", "board_f_cpu",
|
||||
"board_f_flash", "board_flash_mode", "build_flags", "src_build_flags",
|
||||
"build_unflags", "src_filter", "extra_script", "targets",
|
||||
DEFAULT_DUMP_OPTIONS = ("platform", "framework", "board")
|
||||
|
||||
KNOWN_PLATFORMIO_OPTIONS = [
|
||||
"description", "env_default", "home_dir", "lib_dir", "libdeps_dir",
|
||||
"include_dir", "src_dir", "build_dir", "data_dir", "test_dir",
|
||||
"boards_dir", "lib_extra_dirs"
|
||||
]
|
||||
|
||||
KNOWN_ENV_OPTIONS = [
|
||||
"platform", "framework", "board", "build_flags", "src_build_flags",
|
||||
"build_unflags", "src_filter", "extra_scripts", "targets",
|
||||
"upload_port", "upload_protocol", "upload_speed", "upload_flags",
|
||||
"upload_resetmethod", "lib_install", "lib_deps", "lib_force",
|
||||
"lib_ignore", "lib_extra_dirs", "lib_ldf_mode", "lib_compat_mode",
|
||||
"test_ignore", "test_port", "piotest")
|
||||
"upload_resetmethod", "lib_deps", "lib_ignore", "lib_extra_dirs",
|
||||
"lib_ldf_mode", "lib_compat_mode", "lib_archive", "piotest",
|
||||
"test_transport", "test_filter", "test_ignore", "test_port",
|
||||
"test_speed", "test_build_project_src", "debug_tool", "debug_port",
|
||||
"debug_init_cmds", "debug_extra_cmds", "debug_server",
|
||||
"debug_init_break", "debug_load_cmd", "debug_load_mode",
|
||||
"debug_svd_path", "monitor_port", "monitor_speed", "monitor_rts",
|
||||
"monitor_dtr"
|
||||
]
|
||||
|
||||
IGNORE_BUILD_OPTIONS = [
|
||||
"test_transport", "test_filter", "test_ignore", "test_port",
|
||||
"test_speed", "debug_port", "debug_init_cmds", "debug_extra_cmds",
|
||||
"debug_server", "debug_init_break", "debug_load_cmd",
|
||||
"debug_load_mode", "monitor_port", "monitor_speed", "monitor_rts",
|
||||
"monitor_dtr"
|
||||
]
|
||||
|
||||
    REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"}

    RENAMED_OPTIONS = {"lib_use": "lib_deps", "lib_force": "lib_deps"}
    RENAMED_OPTIONS = {
        "lib_use": "lib_deps",
        "lib_force": "lib_deps",
        "extra_script": "extra_scripts",
        "monitor_baud": "monitor_speed",
        "board_mcu": "board_build.mcu",
        "board_f_cpu": "board_build.f_cpu",
        "board_f_flash": "board_build.f_flash",
        "board_flash_mode": "board_build.flash_mode"
    }

    RENAMED_PLATFORMS = {"espressif": "espressif8266"}
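RENAMED_OPTIONS above is the table that maps legacy [env:] keys (monitor_baud, board_mcu, extra_script, and so on) onto their new names. A minimal sketch of how such a rename table can be applied to a parsed section; the sample options dict is hypothetical and this is not a verbatim copy of _validate_options():

RENAMED_OPTIONS = {
    "lib_use": "lib_deps",
    "lib_force": "lib_deps",
    "extra_script": "extra_scripts",
    "monitor_baud": "monitor_speed",
    "board_mcu": "board_build.mcu",
    "board_f_cpu": "board_build.f_cpu",
    "board_f_flash": "board_build.f_flash",
    "board_flash_mode": "board_build.flash_mode",
}

def rename_env_options(options):
    # Replace every legacy key with its new name, keeping the value untouched.
    return {RENAMED_OPTIONS.get(k, k): v for k, v in options.items()}

print(rename_env_options({"monitor_baud": "115200", "board": "uno"}))
# -> {'monitor_speed': '115200', 'board': 'uno'}
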
|
||||
@@ -154,30 +188,32 @@ class EnvironmentProcessor(object):
|
||||
def process(self):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
start_time = time()
|
||||
env_dump = []
|
||||
|
||||
# multi-line values to one line
|
||||
for k, v in self.options.items():
|
||||
if "\n" in v:
|
||||
self.options[k] = self.options[k].strip().replace("\n", ", ")
|
||||
self.options[k] = self.options[k].strip()
|
||||
if self.verbose or k in self.DEFAULT_DUMP_OPTIONS:
|
||||
env_dump.append(
|
||||
"%s: %s" % (k, ", ".join(util.parse_conf_multi_values(v))))
|
||||
|
||||
click.echo("[%s] Processing %s (%s)" % (
|
||||
datetime.now().strftime("%c"), click.style(
|
||||
self.name, fg="cyan", bold=True),
|
||||
", ".join(["%s: %s" % (k, v) for k, v in self.options.items()])))
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
if self.silent:
|
||||
click.echo("Please wait...")
|
||||
if not self.silent:
|
||||
click.echo("Processing %s (%s)" % (click.style(
|
||||
self.name, fg="cyan", bold=True), "; ".join(env_dump)))
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
self.options = self._validate_options(self.options)
|
||||
result = self._run()
|
||||
|
||||
is_error = result['returncode'] != 0
|
||||
|
||||
if self.silent and not is_error:
|
||||
return True
|
||||
|
||||
if is_error or "piotest_processor" not in self.cmd_ctx.meta:
|
||||
print_header(
|
||||
"[%s] Took %.2f seconds" % ((click.style(
|
||||
"ERROR", fg="red", bold=True) if is_error else click.style(
|
||||
"SUCCESS", fg="green", bold=True)),
|
||||
time() - start_time),
|
||||
"[%s] Took %.2f seconds" %
|
||||
((click.style("ERROR", fg="red", bold=True)
|
||||
if is_error else click.style(
|
||||
"SUCCESS", fg="green", bold=True)), time() - start_time),
|
||||
is_error=is_error)
|
||||
|
||||
return not is_error
|
||||
@@ -203,43 +239,52 @@ class EnvironmentProcessor(object):
|
||||
v = self.RENAMED_PLATFORMS[v]
|
||||
|
||||
# warn about unknown options
|
||||
if k not in self.KNOWN_OPTIONS:
|
||||
unknown_conditions = [
|
||||
k not in self.KNOWN_ENV_OPTIONS, not k.startswith("custom_"),
|
||||
not k.startswith("board_")
|
||||
]
|
||||
if all(unknown_conditions):
|
||||
click.secho(
|
||||
"Detected non-PlatformIO `%s` option in `[env:]` section" %
|
||||
k,
|
||||
"Detected non-PlatformIO `%s` option in `[env:%s]` section"
|
||||
% (k, self.name),
|
||||
fg="yellow")
|
||||
result[k] = v
|
||||
return result
|
||||
|
||||
def _get_build_variables(self):
|
||||
def get_build_variables(self):
|
||||
variables = {"pioenv": self.name}
|
||||
if self.upload_port:
|
||||
variables['upload_port'] = self.upload_port
|
||||
for k, v in self.options.items():
|
||||
if k in self.REMAPED_OPTIONS:
|
||||
k = self.REMAPED_OPTIONS[k]
|
||||
if k in self.IGNORE_BUILD_OPTIONS:
|
||||
continue
|
||||
if k == "targets" or (k == "upload_port" and self.upload_port):
|
||||
continue
|
||||
variables[k] = v
|
||||
return variables
|
||||
|
||||
def _get_build_targets(self):
|
||||
def get_build_targets(self):
|
||||
targets = []
|
||||
if self.targets:
|
||||
targets = [t for t in self.targets]
|
||||
elif "targets" in self.options:
|
||||
targets = self.options['targets'].split()
|
||||
targets = self.options['targets'].split(", ")
|
||||
return targets
|
||||
|
||||
def _run(self):
|
||||
if "platform" not in self.options:
|
||||
raise exception.UndefinedEnvPlatform(self.name)
|
||||
|
||||
build_vars = self._get_build_variables()
|
||||
build_targets = self._get_build_targets()
|
||||
build_vars = self.get_build_variables()
|
||||
build_targets = self.get_build_targets()
|
||||
|
||||
telemetry.on_run_environment(self.options, build_targets)
|
||||
|
||||
# skip monitor target, we call it above
|
||||
if "monitor" in build_targets:
|
||||
build_targets.remove("monitor")
|
||||
if "nobuild" not in build_targets:
|
||||
# install dependent libraries
|
||||
if "lib_install" in self.options:
|
||||
@@ -249,10 +294,10 @@ class EnvironmentProcessor(object):
|
||||
if d.strip()
|
||||
], self.verbose)
|
||||
if "lib_deps" in self.options:
|
||||
_autoinstall_libdeps(self.cmd_ctx, [
|
||||
d.strip() for d in self.options['lib_deps'].split(", ")
|
||||
if d.strip()
|
||||
], self.verbose)
|
||||
_autoinstall_libdeps(
|
||||
self.cmd_ctx,
|
||||
util.parse_conf_multi_values(self.options['lib_deps']),
|
||||
self.verbose)
|
||||
|
||||
try:
|
||||
p = PlatformFactory.newPlatform(self.options['platform'])
|
||||
@@ -267,6 +312,8 @@ class EnvironmentProcessor(object):
|
||||
|
||||
|
||||
def _autoinstall_libdeps(ctx, libraries, verbose=False):
|
||||
if not libraries:
|
||||
return
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
ctx.obj = LibraryManager(storage_dir)
|
||||
if verbose:
|
||||
@@ -275,28 +322,31 @@ def _autoinstall_libdeps(ctx, libraries, verbose=False):
|
||||
try:
|
||||
ctx.invoke(cmd_lib_install, libraries=[lib], silent=not verbose)
|
||||
except exception.LibNotFound as e:
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
if verbose or not is_builtin_lib(lib):
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
except exception.InternetIsOffline as e:
|
||||
click.secho(str(e), fg="yellow")
|
||||
|
||||
|
||||
def _clean_pioenvs_dir(pioenvs_dir):
|
||||
structhash_file = join(pioenvs_dir, "structure.hash")
|
||||
def _clean_build_dir(build_dir):
|
||||
structhash_file = join(build_dir, "structure.hash")
|
||||
proj_hash = calculate_project_hash()
|
||||
|
||||
# if project's config is modified
|
||||
if (isdir(pioenvs_dir) and
|
||||
getmtime(join(util.get_project_dir(), "platformio.ini")) >
|
||||
getmtime(pioenvs_dir)):
|
||||
util.rmtree_(pioenvs_dir)
|
||||
if (isdir(build_dir)
|
||||
and getmtime(join(util.get_project_dir(),
|
||||
"platformio.ini")) > getmtime(build_dir)):
|
||||
util.rmtree_(build_dir)
|
||||
|
||||
# check project structure
|
||||
if isdir(pioenvs_dir) and isfile(structhash_file):
|
||||
if isdir(build_dir) and isfile(structhash_file):
|
||||
with open(structhash_file) as f:
|
||||
if f.read() == proj_hash:
|
||||
return
|
||||
util.rmtree_(pioenvs_dir)
|
||||
util.rmtree_(build_dir)
|
||||
|
||||
if not isdir(pioenvs_dir):
|
||||
makedirs(pioenvs_dir)
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
|
||||
with open(structhash_file, "w") as f:
|
||||
f.write(proj_hash)
|
||||
@@ -329,52 +379,56 @@ def print_summary(results, start_time):
|
||||
format_str = (
|
||||
"Environment {0:<" + str(envname_max_len + 9) + "}\t[{1}]")
|
||||
click.echo(
|
||||
format_str.format(
|
||||
click.style(
|
||||
envname, fg="cyan"), status_str),
|
||||
format_str.format(click.style(envname, fg="cyan"), status_str),
|
||||
err=status is False)
|
||||
|
||||
print_header(
|
||||
"[%s] Took %.2f seconds" % ((click.style(
|
||||
"SUCCESS", fg="green", bold=True) if successed else click.style(
|
||||
"ERROR", fg="red", bold=True)), time() - start_time),
|
||||
"[%s] Took %.2f seconds" % (
|
||||
(click.style("SUCCESS", fg="green", bold=True)
|
||||
if successed else click.style("ERROR", fg="red", bold=True)),
|
||||
time() - start_time),
|
||||
is_error=not successed)
|
||||
|
||||
|
||||
def check_project_defopts(config):
|
||||
if not config.has_section("platformio"):
|
||||
return True
|
||||
known = ("home_dir", "lib_dir", "libdeps_dir", "src_dir", "envs_dir",
|
||||
"data_dir", "test_dir", "env_default", "lib_extra_dirs")
|
||||
unknown = set([k for k, _ in config.items("platformio")]) - set(known)
|
||||
unknown = set([k for k, _ in config.items("platformio")]) - set(
|
||||
EnvironmentProcessor.KNOWN_PLATFORMIO_OPTIONS)
|
||||
if not unknown:
|
||||
return True
|
||||
click.secho(
|
||||
"Warning! Ignore unknown `%s` option from `[platformio]` section" %
|
||||
"Warning! Ignore unknown `%s` option in `[platformio]` section" %
|
||||
", ".join(unknown),
|
||||
fg="yellow")
|
||||
return False
|
||||
|
||||
|
||||
def check_project_envs(config, environments):
|
||||
def check_project_envs(config, environments=None):
|
||||
if not config.sections():
|
||||
raise exception.ProjectEnvsNotAvailable()
|
||||
|
||||
known = set([s[4:] for s in config.sections() if s.startswith("env:")])
|
||||
unknown = set(environments) - known
|
||||
unknown = set(environments or []) - known
|
||||
if unknown:
|
||||
raise exception.UnknownEnvNames(", ".join(unknown), ", ".join(known))
|
||||
return True
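For reference, `check_project_envs` compares the requested environment names against the `env:`-prefixed sections of `platformio.ini` (the `s[4:]` slice strips that prefix). A minimal illustration with a stub config object that is not part of the diff:

# Stub config for illustration only.
class _FakeConfig(object):
    @staticmethod
    def sections():
        return ["platformio", "env:uno", "env:nodemcuv2"]

known = set(s[4:] for s in _FakeConfig.sections() if s.startswith("env:"))
assert known == set(["uno", "nodemcuv2"])
# check_project_envs(_FakeConfig, ["teensy31"]) would raise UnknownEnvNames here.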
def calculate_project_hash():
|
||||
structure = [__version__]
|
||||
check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
|
||||
chunks = [__version__]
|
||||
for d in (util.get_projectsrc_dir(), util.get_projectlib_dir()):
|
||||
if not isdir(d):
|
||||
continue
|
||||
for root, _, files in walk(d):
|
||||
for f in files:
|
||||
path = join(root, f)
|
||||
if not any([s in path for s in (".git", ".svn", ".pioenvs")]):
|
||||
structure.append(path)
|
||||
return sha1(",".join(sorted(structure))).hexdigest() if structure else ""
|
||||
if path.endswith(check_suffixes):
|
||||
chunks.append(path)
|
||||
chunks_to_str = ",".join(sorted(chunks))
|
||||
if "windows" in util.get_systype():
|
||||
# Avoid useless project rebuilds on a case-insensitive FS:
# the case of the disk drive letter can differ between runs.
|
||||
chunks_to_str = chunks_to_str.lower()
|
||||
return sha1(chunks_to_str).hexdigest()
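The rewritten hash covers only source-like suffixes and, on Windows, is lower-cased to avoid drive-letter case churn. It pairs with the `structure.hash` file written by `_clean_build_dir()` above; the glue below is illustrative only, not code from the diff.

# Illustrative helper: how the stored structure hash would be re-checked.
from os.path import isfile, join

def build_dir_is_fresh(build_dir):
    structhash_file = join(build_dir, "structure.hash")
    if not isfile(structhash_file):
        return False
    with open(structhash_file) as fp:
        return fp.read() == calculate_project_hash()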
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -31,11 +31,9 @@ def settings_get(name):
|
||||
|
||||
click.echo(
|
||||
list_tpl.format(
|
||||
name=click.style(
|
||||
"Name", fg="cyan"),
|
||||
value=(click.style(
|
||||
"Value", fg="green") + click.style(
|
||||
" [Default]", fg="yellow")),
|
||||
name=click.style("Name", fg="cyan"),
|
||||
value=(click.style("Value", fg="green") + click.style(
|
||||
" [Default]", fg="yellow")),
|
||||
description="Description"))
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
@@ -59,8 +57,7 @@ def settings_get(name):
|
||||
|
||||
click.echo(
|
||||
list_tpl.format(
|
||||
name=click.style(
|
||||
_name, fg="cyan"),
|
||||
name=click.style(_name, fg="cyan"),
|
||||
value=_value_str,
|
||||
description=_data['description']))
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -17,12 +17,23 @@ from os import getcwd
|
||||
|
||||
import click
|
||||
|
||||
from platformio.pioplus import pioplus_call
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.command("test", short_help="Local Unit Testing")
|
||||
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
|
||||
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
|
||||
@click.option(
|
||||
"--filter",
|
||||
"-f",
|
||||
multiple=True,
|
||||
metavar="<pattern>",
|
||||
help="Filter tests by a pattern")
|
||||
@click.option(
|
||||
"--ignore",
|
||||
"-i",
|
||||
multiple=True,
|
||||
metavar="<pattern>",
|
||||
help="Ignore tests by a pattern")
|
||||
@click.option("--upload-port")
|
||||
@click.option("--test-port")
|
||||
@click.option(
|
||||
@@ -37,6 +48,20 @@ from platformio.pioplus import pioplus_call
|
||||
resolve_path=True))
|
||||
@click.option("--without-building", is_flag=True)
|
||||
@click.option("--without-uploading", is_flag=True)
|
||||
@click.option(
|
||||
"--no-reset",
|
||||
is_flag=True,
|
||||
help="Disable software reset via Serial.DTR/RST")
|
||||
@click.option(
|
||||
"--monitor-rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state for Serial Monitor")
|
||||
@click.option(
|
||||
"--monitor-dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state for Serial Monitor")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
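Since the `test` command now declares `--filter`/`--ignore` with help texts before proxying to `pioplus_call`, the options are visible to click. A quick check with click's test runner; the import path of this module is assumed to be `platformio.commands.test`.

# Sketch: confirm the new options are registered on the click command.
from click.testing import CliRunner

from platformio.commands.test import cli as cmd_test  # module path assumed

result = CliRunner().invoke(cmd_test, ["--help"])
assert "--filter" in result.output and "--ignore" in result.output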
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -14,25 +14,36 @@
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app
|
||||
from platformio.commands.lib import lib_update as cmd_lib_update
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.managers.core import update_core_packages
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.pioplus import pioplus_update
|
||||
|
||||
|
||||
@click.command(
|
||||
"update", short_help="Update installed Platforms, Packages and Libraries")
|
||||
"update", short_help="Update installed platforms, packages and libraries")
|
||||
@click.option(
|
||||
"--core-packages", is_flag=True, help="Update only the core packages")
|
||||
@click.option(
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for new version")
|
||||
@click.pass_context
|
||||
def cli(ctx, only_check):
|
||||
def cli(ctx, core_packages, only_check):
|
||||
update_core_packages(only_check)
|
||||
|
||||
if core_packages:
|
||||
return
|
||||
|
||||
# cleanup lib search results, cached board and platform lists
|
||||
app.clean_cache()
|
||||
|
||||
click.echo()
|
||||
click.echo("Platform Manager")
|
||||
click.echo("================")
|
||||
ctx.invoke(cmd_platform_update, only_check=only_check)
|
||||
pioplus_update()
|
||||
|
||||
click.echo()
|
||||
click.echo("Library Manager")
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -12,64 +12,66 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import re
|
||||
from zipfile import ZipFile
|
||||
|
||||
import click
|
||||
import requests
|
||||
|
||||
from platformio import VERSION, __version__, exception, util
|
||||
from platformio.commands.home import shutdown_servers
|
||||
|
||||
|
||||
@click.command(
|
||||
"upgrade", short_help="Upgrade PlatformIO to the latest version")
|
||||
def cli():
|
||||
latest = get_latest_version()
|
||||
if __version__ == latest:
|
||||
@click.option("--dev", is_flag=True, help="Use development branch")
|
||||
def cli(dev):
|
||||
if not dev and __version__ == get_latest_version():
|
||||
return click.secho(
|
||||
"You're up-to-date!\nPlatformIO %s is currently the "
|
||||
"newest version available." % __version__,
|
||||
fg="green")
|
||||
else:
|
||||
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
|
||||
|
||||
to_develop = not all([c.isdigit() for c in latest if c != "."])
|
||||
cmds = ([
|
||||
"pip", "install", "--upgrade",
|
||||
"https://github.com/platformio/platformio-core/archive/develop.zip"
|
||||
if to_develop else "platformio"
|
||||
], ["platformio", "--version"])
|
||||
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
|
||||
|
||||
cmd = None
|
||||
r = None
|
||||
try:
|
||||
for cmd in cmds:
|
||||
cmd = [util.get_pythonexe_path(), "-m"] + cmd
|
||||
r = None
|
||||
# kill all PIO Home servers, they block `pioplus` binary
|
||||
shutdown_servers()
|
||||
|
||||
to_develop = dev or not all(c.isdigit() for c in __version__ if c != ".")
|
||||
cmds = (["pip", "install", "--upgrade",
|
||||
get_pip_package(to_develop)], ["platformio", "--version"])
|
||||
|
||||
cmd = None
|
||||
r = None
|
||||
try:
|
||||
for cmd in cmds:
|
||||
cmd = [util.get_pythonexe_path(), "-m"] + cmd
|
||||
r = None
|
||||
r = util.exec_command(cmd)
|
||||
|
||||
# try pip with disabled cache
|
||||
if r['returncode'] != 0 and cmd[2] == "pip":
|
||||
cmd.insert(3, "--no-cache-dir")
|
||||
r = util.exec_command(cmd)
|
||||
|
||||
# try pip with disabled cache
|
||||
if r['returncode'] != 0 and cmd[2] == "pip":
|
||||
cmd.insert(3, "--no-cache-dir")
|
||||
r = util.exec_command(cmd)
|
||||
|
||||
assert r['returncode'] == 0
|
||||
assert "version" in r['out']
|
||||
actual_version = r['out'].strip().split("version", 1)[1].strip()
|
||||
assert r['returncode'] == 0
|
||||
assert "version" in r['out']
|
||||
actual_version = r['out'].strip().split("version", 1)[1].strip()
|
||||
click.secho(
|
||||
"PlatformIO has been successfully upgraded to %s" % actual_version,
|
||||
fg="green")
|
||||
click.echo("Release notes: ", nl=False)
|
||||
click.secho(
|
||||
"http://docs.platformio.org/en/latest/history.html", fg="cyan")
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
if not r:
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
|
||||
permission_errors = ("permission denied", "not permitted")
|
||||
if (any(m in r['err'].lower() for m in permission_errors)
|
||||
and "windows" not in util.get_systype()):
|
||||
click.secho(
|
||||
"PlatformIO has been successfully upgraded to %s" %
|
||||
actual_version,
|
||||
fg="green")
|
||||
click.echo("Release notes: ", nl=False)
|
||||
click.secho(
|
||||
"http://docs.platformio.org/en/latest/history.html", fg="cyan")
|
||||
except Exception as e: # pylint: disable=W0703
|
||||
if not r:
|
||||
raise exception.UpgradeError("\n".join([str(cmd), str(e)]))
|
||||
permission_errors = ("permission denied", "not permitted")
|
||||
if (any([m in r['err'].lower() for m in permission_errors]) and
|
||||
"windows" not in util.get_systype()):
|
||||
click.secho(
|
||||
"""
|
||||
"""
|
||||
-----------------
|
||||
Permission denied
|
||||
-----------------
|
||||
@@ -79,12 +81,37 @@ You need the `sudo` permission to install Python packages. Try
|
||||
|
||||
WARNING! Don't use `sudo` for the rest of the PlatformIO commands.
|
||||
""",
|
||||
fg="yellow",
|
||||
err=True)
|
||||
raise exception.ReturnErrorCode(1)
|
||||
else:
|
||||
raise exception.UpgradeError("\n".join(
|
||||
[str(cmd), r['out'], r['err']]))
|
||||
fg="yellow",
|
||||
err=True)
|
||||
raise exception.ReturnErrorCode(1)
|
||||
else:
|
||||
raise exception.UpgradeError("\n".join(
|
||||
[str(cmd), r['out'], r['err']]))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_pip_package(to_develop):
|
||||
if not to_develop:
|
||||
return "platformio"
|
||||
dl_url = ("https://github.com/platformio/"
|
||||
"platformio-core/archive/develop.zip")
|
||||
cache_dir = util.get_cache_dir()
|
||||
if not os.path.isdir(cache_dir):
|
||||
os.makedirs(cache_dir)
|
||||
pkg_name = os.path.join(cache_dir, "piocoredevelop.zip")
|
||||
try:
|
||||
with open(pkg_name, "w") as fp:
|
||||
r = util.exec_command(
|
||||
["curl", "-fsSL", dl_url], stdout=fp, universal_newlines=True)
|
||||
assert r['returncode'] == 0
|
||||
# check ZIP structure
|
||||
with ZipFile(pkg_name) as zp:
|
||||
assert zp.testzip() is None
|
||||
return pkg_name
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return dl_url
|
||||
|
||||
|
||||
def get_latest_version():
|
||||
@@ -101,9 +128,10 @@ def get_latest_version():
|
||||
|
||||
def get_develop_latest_version():
|
||||
version = None
|
||||
r = requests.get("https://raw.githubusercontent.com/platformio/platformio"
|
||||
"/develop/platformio/__init__.py",
|
||||
headers=util.get_request_defheaders())
|
||||
r = requests.get(
|
||||
"https://raw.githubusercontent.com/platformio/platformio"
|
||||
"/develop/platformio/__init__.py",
|
||||
headers=util.get_request_defheaders())
|
||||
r.raise_for_status()
|
||||
for line in r.text.split("\n"):
|
||||
line = line.strip()
|
||||
@@ -121,7 +149,8 @@ def get_develop_latest_version():
|
||||
|
||||
|
||||
def get_pypi_latest_version():
|
||||
r = requests.get("https://pypi.python.org/pypi/platformio/json",
|
||||
headers=util.get_request_defheaders())
|
||||
r = requests.get(
|
||||
"https://pypi.org/pypi/platformio/json",
|
||||
headers=util.get_request_defheaders())
|
||||
r.raise_for_status()
|
||||
return r.json()['info']['version']
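`get_latest_version()` itself is only shown as a hunk header here, so the sketch below is an assumed reconstruction of how it plausibly ties the two helpers together (development builds compare against the develop branch, releases against PyPI); do not read it as the committed implementation.

# Assumed reconstruction for illustration; the real body is elided in this diff.
def get_latest_version():
    try:
        if not all(c.isdigit() for c in __version__ if c != "."):
            # a dev build (e.g. "3.6.0b1") tracks the develop branch
            return get_develop_latest_version()
        return get_pypi_latest_version()
    except requests.RequestException:
        raise exception.GetLatestVersionError()  # exception name assumed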
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -15,12 +15,13 @@
|
||||
from email.utils import parsedate_tz
|
||||
from math import ceil
|
||||
from os.path import getsize, join
|
||||
from sys import getfilesystemencoding, version_info
|
||||
from time import mktime
|
||||
|
||||
import click
|
||||
import requests
|
||||
|
||||
from platformio import app, util
|
||||
from platformio import util
|
||||
from platformio.exception import (FDSHASumMismatch, FDSizeMismatch,
|
||||
FDUnrecognizedStatusCode)
|
||||
|
||||
@@ -30,10 +31,13 @@ class FileDownloader(object):
|
||||
CHUNK_SIZE = 1024
|
||||
|
||||
def __init__(self, url, dest_dir=None):
|
||||
self._request = None
|
||||
# make connection
|
||||
self._request = requests.get(url,
|
||||
stream=True,
|
||||
headers=util.get_request_defheaders())
|
||||
self._request = requests.get(
|
||||
url,
|
||||
stream=True,
|
||||
headers=util.get_request_defheaders(),
|
||||
verify=version_info >= (2, 7, 9))
|
||||
if self._request.status_code != 200:
|
||||
raise FDUnrecognizedStatusCode(self._request.status_code, url)
|
||||
|
||||
@@ -43,12 +47,12 @@ class FileDownloader(object):
|
||||
9:].replace('"', "").replace("'", "")
|
||||
self._fname = self._fname.encode("utf8")
|
||||
else:
|
||||
self._fname = url.split("/")[-1]
|
||||
self._fname = [p for p in url.split("/") if p][-1]
|
||||
|
||||
self._progressbar = None
|
||||
self._destination = self._fname
|
||||
if dest_dir:
|
||||
self.set_destination(join(dest_dir, self._fname))
|
||||
self.set_destination(
|
||||
join(dest_dir.decode(getfilesystemencoding()), self._fname))
|
||||
|
||||
def set_destination(self, destination):
|
||||
self._destination = destination
|
||||
@@ -64,26 +68,30 @@ class FileDownloader(object):
|
||||
return -1
|
||||
return int(self._request.headers['content-length'])
|
||||
|
||||
def start(self):
|
||||
def start(self, with_progress=True):
|
||||
label = "Downloading"
|
||||
itercontent = self._request.iter_content(chunk_size=self.CHUNK_SIZE)
|
||||
f = open(self._destination, "wb")
|
||||
|
||||
if app.is_disabled_progressbar() or self.get_size() == -1:
|
||||
click.echo("Downloading...")
|
||||
for chunk in itercontent:
|
||||
if chunk:
|
||||
f.write(chunk)
|
||||
else:
|
||||
chunks = int(ceil(self.get_size() / float(self.CHUNK_SIZE)))
|
||||
with click.progressbar(length=chunks, label="Downloading") as pb:
|
||||
for _ in pb:
|
||||
f.write(next(itercontent))
|
||||
f.close()
|
||||
self._request.close()
|
||||
try:
|
||||
if not with_progress or self.get_size() == -1:
|
||||
click.echo("%s..." % label)
|
||||
for chunk in itercontent:
|
||||
if chunk:
|
||||
f.write(chunk)
|
||||
else:
|
||||
chunks = int(ceil(self.get_size() / float(self.CHUNK_SIZE)))
|
||||
with click.progressbar(length=chunks, label=label) as pb:
|
||||
for _ in pb:
|
||||
f.write(next(itercontent))
|
||||
finally:
|
||||
f.close()
|
||||
self._request.close()
|
||||
|
||||
if self.get_lmtime():
|
||||
self._preserve_filemtime(self.get_lmtime())
|
||||
|
||||
return True
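A short usage sketch for the downloader as it stands after this change; the URL, directory and checksum are placeholders.

# Usage sketch; URL, directory and checksum are placeholders.
fd = FileDownloader("https://example.com/package.tar.gz", dest_dir="/tmp")
fd.start(with_progress=False)  # prints "Downloading..." instead of a progress bar
fd.verify(sha1=None)           # only the size check runs when no checksum is given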
def verify(self, sha1=None):
|
||||
_dlsize = getsize(self._destination)
|
||||
if self.get_size() != -1 and _dlsize != self.get_size():
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -20,8 +20,7 @@ class PlatformioException(Exception):
|
||||
def __str__(self): # pragma: no cover
|
||||
if self.MESSAGE:
|
||||
return self.MESSAGE.format(*self.args)
|
||||
else:
|
||||
return Exception.__str__(self)
|
||||
return Exception.__str__(self)
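All subclasses below only override `MESSAGE`; the positional constructor arguments are substituted by this `__str__`. A small illustration using one of the classes defined later in this file:

# Illustration of the MESSAGE formatting contract (uses a class defined below).
try:
    raise FDSizeMismatch(1024, "firmware.bin", 2048)
except PlatformioException as e:
    assert "1024 bytes" in str(e) and "firmware.bin" in str(e)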
class ReturnErrorCode(PlatformioException):
|
||||
@@ -29,10 +28,18 @@ class ReturnErrorCode(PlatformioException):
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
class LockFileTimeoutError(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class MinitermException(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class UserSideException(PlatformioException):
|
||||
pass
|
||||
|
||||
|
||||
class AbortedByUser(PlatformioException):
|
||||
|
||||
MESSAGE = "Aborted by user"
|
||||
@@ -40,20 +47,25 @@ class AbortedByUser(PlatformioException):
|
||||
|
||||
class UnknownPlatform(PlatformioException):
|
||||
|
||||
MESSAGE = "Unknown platform '{0}'"
|
||||
MESSAGE = "Unknown development platform '{0}'"
|
||||
|
||||
|
||||
class IncompatiblePlatform(PlatformioException):
|
||||
|
||||
MESSAGE = "Development platform '{0}' is not compatible with PIO Core v{1}"
|
||||
|
||||
|
||||
class PlatformNotInstalledYet(PlatformioException):
|
||||
|
||||
MESSAGE = "The platform '{0}' has not been installed yet. "\
|
||||
"Use `platformio platform install {0}` command"
|
||||
MESSAGE = ("The platform '{0}' has not been installed yet. "
|
||||
"Use `platformio platform install {0}` command")
|
||||
|
||||
|
||||
class BoardNotDefined(PlatformioException):
|
||||
|
||||
MESSAGE = "You need to specify board ID using `-b` or `--board` "\
|
||||
"option. Supported boards list is available via "\
|
||||
" `platformio boards` command"
|
||||
MESSAGE = (
|
||||
"You need to specify board ID using `-b` or `--board` option. "
|
||||
"Supported boards list is available via `platformio boards` command")
|
||||
|
||||
|
||||
class UnknownBoard(PlatformioException):
|
||||
@@ -78,19 +90,27 @@ class UnknownPackage(PlatformioException):
|
||||
|
||||
class MissingPackageManifest(PlatformioException):
|
||||
|
||||
MESSAGE = "Could not find '{0}' manifest file in the package"
|
||||
MESSAGE = "Could not find one of '{0}' manifest files in the package"
|
||||
|
||||
|
||||
class UndefinedPackageVersion(PlatformioException):
|
||||
|
||||
MESSAGE = "Could not find a version that satisfies the requirement '{0}'"\
|
||||
" for your system '{1}'"
|
||||
MESSAGE = ("Could not find a version that satisfies the requirement '{0}'"
|
||||
" for your system '{1}'")
|
||||
|
||||
|
||||
class PackageInstallError(PlatformioException):
|
||||
|
||||
MESSAGE = "Can not install '{0}' with version requirements '{1}' "\
|
||||
"for your system '{2}'"
|
||||
MESSAGE = ("Could not install '{0}' with version requirements '{1}' "
|
||||
"for your system '{2}'.\n\n"
|
||||
"Please try this solution -> http://bit.ly/faq-package-manager")
|
||||
|
||||
|
||||
class ExtractArchiveItemError(PlatformioException):
|
||||
|
||||
MESSAGE = (
|
||||
"Could not extract `{0}` to `{1}`. Try to disable antivirus "
|
||||
"tool or check this solution -> http://bit.ly/faq-package-manager")
|
||||
|
||||
|
||||
class FDUnrecognizedStatusCode(PlatformioException):
|
||||
@@ -100,21 +120,22 @@ class FDUnrecognizedStatusCode(PlatformioException):
|
||||
|
||||
class FDSizeMismatch(PlatformioException):
|
||||
|
||||
MESSAGE = "The size ({0:d} bytes) of downloaded file '{1}' "\
|
||||
"is not equal to remote size ({2:d} bytes)"
|
||||
MESSAGE = ("The size ({0:d} bytes) of downloaded file '{1}' "
|
||||
"is not equal to remote size ({2:d} bytes)")
|
||||
|
||||
|
||||
class FDSHASumMismatch(PlatformioException):
|
||||
|
||||
MESSAGE = "The 'sha1' sum '{0}' of downloaded file '{1}' "\
|
||||
"is not equal to remote '{2}'"
|
||||
MESSAGE = ("The 'sha1' sum '{0}' of downloaded file '{1}' "
|
||||
"is not equal to remote '{2}'")
|
||||
|
||||
|
||||
class NotPlatformIOProject(PlatformioException):
|
||||
|
||||
MESSAGE = "Not a PlatformIO project. `platformio.ini` file has not been "\
|
||||
"found in current working directory ({0}). To initialize new project "\
|
||||
"please use `platformio init` command"
|
||||
MESSAGE = (
|
||||
"Not a PlatformIO project. `platformio.ini` file has not been "
|
||||
"found in current working directory ({0}). To initialize new project "
|
||||
"please use `platformio init` command")
|
||||
|
||||
|
||||
class UndefinedEnvPlatform(PlatformioException):
|
||||
@@ -154,24 +175,27 @@ class APIRequestError(PlatformioException):
|
||||
|
||||
class InternetIsOffline(PlatformioException):
|
||||
|
||||
MESSAGE = "You are not connected to the Internet"
|
||||
MESSAGE = (
|
||||
"You are not connected to the Internet.\n"
|
||||
"If you build a project first time, we need Internet connection "
|
||||
"to install all dependencies and toolchain.")
|
||||
|
||||
|
||||
class LibNotFound(PlatformioException):
|
||||
|
||||
MESSAGE = "Library `{0}` has not been found in PlatformIO Registry.\n"\
|
||||
"You can ignore this message, if `{0}` is a built-in library "\
|
||||
"(included in framework, SDK). E.g., SPI, Wire, etc."
|
||||
MESSAGE = ("Library `{0}` has not been found in PlatformIO Registry.\n"
|
||||
"You can ignore this message, if `{0}` is a built-in library "
|
||||
"(included in framework, SDK). E.g., SPI, Wire, etc.")
|
||||
|
||||
|
||||
class NotGlobalLibDir(PlatformioException):
|
||||
|
||||
MESSAGE = "The `{0}` is not a PlatformIO project.\n\n"\
|
||||
"To manage libraries "\
|
||||
"in global storage `{1}`,\n"\
|
||||
"please use `platformio lib --global {2}` or specify custom "\
|
||||
"storage `platformio lib --storage-dir /path/to/storage/ {2}`."\
|
||||
"\nCheck `platformio lib --help` for details."
|
||||
MESSAGE = (
|
||||
"The `{0}` is not a PlatformIO project.\n\n"
|
||||
"To manage libraries in global storage `{1}`,\n"
|
||||
"please use `platformio lib --global {2}` or specify custom storage "
|
||||
"`platformio lib --storage-dir /path/to/storage/ {2}`.\n"
|
||||
"Check `platformio lib --help` for details.")
|
||||
|
||||
|
||||
class InvalidLibConfURL(PlatformioException):
|
||||
@@ -179,6 +203,11 @@ class InvalidLibConfURL(PlatformioException):
|
||||
MESSAGE = "Invalid library config URL '{0}'"
|
||||
|
||||
|
||||
class InvalidProjectConf(PlatformioException):
|
||||
|
||||
MESSAGE = "Invalid `platformio.ini`, project configuration file: '{0}'"
|
||||
|
||||
|
||||
class BuildScriptNotFound(PlatformioException):
|
||||
|
||||
MESSAGE = "Invalid path '{0}' to build script"
|
||||
@@ -194,11 +223,16 @@ class InvalidSettingValue(PlatformioException):
|
||||
MESSAGE = "Invalid value '{0}' for the setting '{1}'"
|
||||
|
||||
|
||||
class InvalidJSONFile(PlatformioException):
|
||||
|
||||
MESSAGE = "Could not load broken JSON: {0}"
|
||||
|
||||
|
||||
class CIBuildEnvsEmpty(PlatformioException):
|
||||
|
||||
MESSAGE = "Can't find PlatformIO build environments.\n"\
|
||||
"Please specify `--board` or path to `platformio.ini` with "\
|
||||
"predefined environments using `--project-conf` option"
|
||||
MESSAGE = ("Can't find PlatformIO build environments.\n"
|
||||
"Please specify `--board` or path to `platformio.ini` with "
|
||||
"predefined environments using `--project-conf` option")
|
||||
|
||||
|
||||
class UpgradeError(PlatformioException):
|
||||
@@ -211,7 +245,29 @@ class UpgradeError(PlatformioException):
|
||||
"""
|
||||
|
||||
|
||||
class HomeDirPermissionsError(PlatformioException):
|
||||
|
||||
MESSAGE = (
|
||||
"The directory `{0}` or its parent directory is not owned by the "
|
||||
"current user and PlatformIO can not store configuration data.\n"
|
||||
"Please check the permissions and owner of that directory.\n"
|
||||
"Otherwise, please remove manually `{0}` directory and PlatformIO "
|
||||
"will create new from the current user.")
|
||||
|
||||
|
||||
class CygwinEnvDetected(PlatformioException):
|
||||
|
||||
MESSAGE = "PlatformIO does not work within Cygwin environment. "\
|
||||
"Use native Terminal instead."
|
||||
MESSAGE = ("PlatformIO does not work within Cygwin environment. "
|
||||
"Use native Terminal instead.")
|
||||
|
||||
|
||||
class DebugSupportError(PlatformioException):
|
||||
|
||||
MESSAGE = ("Currently, PlatformIO does not support debugging for `{0}`.\n"
|
||||
"Please contact support@pioplus.com or visit "
|
||||
"< http://docs.platformio.org/page/plus/debugging.html >")
|
||||
|
||||
|
||||
class DebugInvalidOptions(PlatformioException):
|
||||
|
||||
pass
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -18,21 +18,20 @@ import re
|
||||
from os.path import abspath, basename, expanduser, isdir, isfile, join, relpath
|
||||
|
||||
import bottle
|
||||
from click.testing import CliRunner
|
||||
|
||||
from platformio import app, exception, util
|
||||
from platformio import exception, util
|
||||
from platformio.commands.run import cli as cmd_run
|
||||
|
||||
|
||||
class ProjectGenerator(object):
|
||||
|
||||
def __init__(self, project_dir, ide, board):
|
||||
def __init__(self, project_dir, ide, env_name):
|
||||
self.project_dir = project_dir
|
||||
self.ide = ide
|
||||
self.board = board
|
||||
self.env_name = env_name
|
||||
|
||||
self._tplvars = {}
|
||||
|
||||
with util.cd(self.project_dir):
|
||||
self.project_src_dir = util.get_projectsrc_dir()
|
||||
|
||||
self._gather_tplvars()
|
||||
|
||||
@staticmethod
|
||||
@@ -41,38 +40,43 @@ class ProjectGenerator(object):
|
||||
return sorted(
|
||||
[d for d in os.listdir(tpls_dir) if isdir(join(tpls_dir, d))])
|
||||
|
||||
@util.memoized
|
||||
@util.memoized()
|
||||
def get_project_env(self):
|
||||
data = {"env_name": "PlatformIO"}
|
||||
data = {}
|
||||
config = util.load_project_config(self.project_dir)
|
||||
for section in config.sections():
|
||||
if not section.startswith("env:"):
|
||||
continue
|
||||
if self.env_name != section[4:]:
|
||||
continue
|
||||
data = {"env_name": section[4:]}
|
||||
for k, v in config.items(section):
|
||||
data[k] = v
|
||||
if self.board == data.get("board"):
|
||||
break
|
||||
return data
|
||||
|
||||
@util.memoized
|
||||
def get_project_build_data(self):
|
||||
data = {"defines": [], "includes": [], "cxx_path": None}
|
||||
data = {
|
||||
"defines": [],
|
||||
"includes": [],
|
||||
"cxx_path": None,
|
||||
"prog_path": None
|
||||
}
|
||||
envdata = self.get_project_env()
|
||||
if "env_name" not in envdata:
|
||||
if not envdata:
|
||||
return data
|
||||
cmd = [util.get_pythonexe_path(), "-m", "platformio", "-f"]
|
||||
if app.get_session_var("caller_id"):
|
||||
cmd.extend(["-c", app.get_session_var("caller_id")])
|
||||
cmd.extend(["run", "-t", "idedata", "-e", envdata['env_name']])
|
||||
cmd.extend(["-d", self.project_dir])
|
||||
result = util.exec_command(cmd)
|
||||
|
||||
if result['returncode'] != 0 or '"includes":' not in result['out']:
|
||||
raise exception.PlatformioException("\n".join(
|
||||
[result['out'], result['err']]))
|
||||
result = CliRunner().invoke(cmd_run, [
|
||||
"--project-dir", self.project_dir, "--environment",
|
||||
envdata['env_name'], "--target", "idedata"
|
||||
])
|
||||
|
||||
for line in result['out'].split("\n"):
|
||||
if result.exit_code != 0 and not isinstance(result.exception,
|
||||
exception.ReturnErrorCode):
|
||||
raise result.exception
|
||||
if '"includes":' not in result.output:
|
||||
raise exception.PlatformioException(result.output)
|
||||
|
||||
for line in result.output.split("\n"):
|
||||
line = line.strip()
|
||||
if line.startswith('{"') and line.endswith("}"):
|
||||
data = json.loads(line)
|
||||
@@ -84,7 +88,7 @@ class ProjectGenerator(object):
|
||||
def get_src_files(self):
|
||||
result = []
|
||||
with util.cd(self.project_dir):
|
||||
for root, _, files in os.walk(self.project_src_dir):
|
||||
for root, _, files in os.walk(util.get_projectsrc_dir()):
|
||||
for f in files:
|
||||
result.append(relpath(join(root, f)))
|
||||
return result
|
||||
@@ -145,18 +149,21 @@ class ProjectGenerator(object):
|
||||
def _gather_tplvars(self):
|
||||
self._tplvars.update(self.get_project_env())
|
||||
self._tplvars.update(self.get_project_build_data())
|
||||
self._tplvars.update({
|
||||
"project_name": self.get_project_name(),
|
||||
"src_files": self.get_src_files(),
|
||||
"user_home_dir": abspath(expanduser("~")),
|
||||
"project_dir": self.project_dir,
|
||||
"project_src_dir": self.project_src_dir,
|
||||
"systype": util.get_systype(),
|
||||
"platformio_path":
|
||||
self._fix_os_path(util.where_is_program("platformio")),
|
||||
"env_pathsep": os.pathsep,
|
||||
"env_path": self._fix_os_path(os.getenv("PATH"))
|
||||
})
|
||||
with util.cd(self.project_dir):
|
||||
self._tplvars.update({
|
||||
"project_name": self.get_project_name(),
|
||||
"src_files": self.get_src_files(),
|
||||
"user_home_dir": abspath(expanduser("~")),
|
||||
"project_dir": self.project_dir,
|
||||
"project_src_dir": util.get_projectsrc_dir(),
|
||||
"project_lib_dir": util.get_projectlib_dir(),
|
||||
"project_libdeps_dir": util.get_projectlibdeps_dir(),
|
||||
"systype": util.get_systype(),
|
||||
"platformio_path": self._fix_os_path(
|
||||
util.where_is_program("platformio")),
|
||||
"env_pathsep": os.pathsep,
|
||||
"env_path": self._fix_os_path(os.getenv("PATH"))
|
||||
}) # yapf: disable
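The collected `_tplvars` are what the bottle-based `.tpl` files later in this diff consume. A minimal, purely illustrative rendering; the ProjectGenerator's own render step is not part of this hunk.

# Illustrative only: rendering a tiny bottle template with gathered variables.
import bottle

tpl_source = "\n".join([
    "% for define in defines:",
    "-D{{define}}",
    "% end",
])
rendered = bottle.template(tpl_source, defines=["F_CPU=16000000L", "ARDUINO=10805"])
# rendered -> "-DF_CPU=16000000L\n-DARDUINO=10805\n" (roughly)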
@staticmethod
|
||||
def _fix_os_path(path):
|
||||
|
||||
platformio/ide/tpls/clion/.idea/misc.xml.tpl (generated, new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="CMakeWorkspace" PROJECT_DIR="$PROJECT_DIR$" />
|
||||
<component name="CidrRootsConfiguration">
|
||||
<sourceRoots>
|
||||
<file path="$PROJECT_DIR$/src" />
|
||||
</sourceRoots>
|
||||
<libraryRoots>
|
||||
<file path="$PROJECT_DIR$/lib" />
|
||||
<file path="$PROJECT_DIR$/.piolibdeps" />
|
||||
</libraryRoots>
|
||||
<excludeRoots>
|
||||
<file path="$PROJECT_DIR$/.pioenvs" />
|
||||
</excludeRoots>
|
||||
</component>
|
||||
</project>
|
||||
@@ -5,9 +5,11 @@ SET(CMAKE_C_COMPILER "{{cc_path.replace("\\", "/")}}")
|
||||
SET(CMAKE_CXX_COMPILER "{{cxx_path.replace("\\", "/")}}")
|
||||
SET(CMAKE_CXX_FLAGS_DISTRIBUTION "{{cxx_flags}}")
|
||||
SET(CMAKE_C_FLAGS_DISTRIBUTION "{{cc_flags}}")
|
||||
set(CMAKE_CXX_STANDARD 11)
|
||||
|
||||
% import re
|
||||
% for define in defines:
|
||||
add_definitions(-D{{!define}})
|
||||
add_definitions(-D'{{!re.sub(r"([\"\(\)#])", r"\\\1", define)}}')
|
||||
% end
|
||||
|
||||
% for include in includes:
|
||||
@@ -22,4 +24,4 @@ include_directories("{{include.replace("\\", "/")}}")
|
||||
% end
|
||||
% end
|
||||
|
||||
FILE(GLOB_RECURSE SRC_LIST "{{project_src_dir.replace("\\", "/")}}/*.*")
|
||||
FILE(GLOB_RECURSE SRC_LIST "{{project_src_dir.replace("\\", "/")}}/*.*" "{{project_lib_dir.replace("\\", "/")}}/*.*" "{{project_libdeps_dir.replace("\\", "/")}}/*.*")
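The new `re.sub` in the CLion template escapes quotes, parentheses and `#` before the define is wrapped in single quotes for `add_definitions`. A quick check of that expression in plain Python; the sample define is made up.

import re

define = 'PIO_VERSION="3.6"'  # made-up sample value
escaped = re.sub(r"([\"\(\)#])", r"\\\1", define)
assert escaped == 'PIO_VERSION=\\"3.6\\"'
# rendered CMake line: add_definitions(-D'PIO_VERSION=\"3.6\"')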
@@ -5,13 +5,13 @@
|
||||
<storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.910961921" moduleId="org.eclipse.cdt.core.settings" name="Default">
|
||||
<externalSettings/>
|
||||
<extensions>
|
||||
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
|
||||
<extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
|
||||
</extensions>
|
||||
</storageModule>
|
||||
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
|
||||
@@ -99,6 +99,104 @@
|
||||
</storageModule>
|
||||
<storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
|
||||
</cconfiguration>
|
||||
<cconfiguration id="0.910961921.1363900502">
|
||||
<storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.910961921.1363900502" moduleId="org.eclipse.cdt.core.settings" name="Debug">
|
||||
<externalSettings/>
|
||||
<extensions>
|
||||
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
|
||||
<extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
<extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
|
||||
</extensions>
|
||||
</storageModule>
|
||||
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
|
||||
<configuration artifactName="mbed" buildProperties="" description="" id="0.910961921.1363900502" name="Debug" parent="org.eclipse.cdt.build.core.prefbase.cfg">
|
||||
<folderInfo id="0.910961921.1363900502." name="/" resourcePath="">
|
||||
<toolChain id="org.eclipse.cdt.build.core.prefbase.toolchain.2116690625" name="No ToolChain" resourceTypeBasedDiscovery="false" superClass="org.eclipse.cdt.build.core.prefbase.toolchain">
|
||||
<targetPlatform binaryParser="org.eclipse.cdt.core.ELF" id="org.eclipse.cdt.build.core.prefbase.toolchain.2116690625.848954921" name=""/>
|
||||
<builder arguments="-f -c eclipse debug" cleanBuildTarget="run --target clean" command="platformio" enableCleanBuild="false" id="org.eclipse.cdt.build.core.settings.default.builder.985867833" incrementalBuildTarget="" keepEnvironmentInBuildfile="false" managedBuildOn="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.libs.1855678035" name="holder for library settings" superClass="org.eclipse.cdt.build.core.settings.holder.libs"/>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.30528994" name="Assembly" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.794801023" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
% else:
|
||||
<listOptionValue builtIn="false" value="${HOME}{{include.replace(user_home_dir, '')}}"/>
|
||||
% end
|
||||
% else:
|
||||
<listOptionValue builtIn="false" value="{{include}}"/>
|
||||
% end
|
||||
% end
|
||||
</option>
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.symbols.1743427839" name="Symbols" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
|
||||
% for define in defines:
|
||||
<listOptionValue builtIn="false" value="{{define}}"/>
|
||||
% end
|
||||
</option>
|
||||
<inputType id="org.eclipse.cdt.build.core.settings.holder.inType.919136836" languageId="org.eclipse.cdt.core.assembly" languageName="Assembly" sourceContentType="org.eclipse.cdt.core.asmSource" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
|
||||
</tool>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.1146422798" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.650084869" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" useByScannerDiscovery="false" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
% else:
|
||||
<listOptionValue builtIn="false" value="${HOME}{{include.replace(user_home_dir, '')}}"/>
|
||||
% end
|
||||
% else:
|
||||
<listOptionValue builtIn="false" value="{{include}}"/>
|
||||
% end
|
||||
% end
|
||||
</option>
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.symbols.2055633423" name="Symbols" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" useByScannerDiscovery="false" valueType="definedSymbols">
|
||||
% for define in defines:
|
||||
<listOptionValue builtIn="false" value="{{define}}"/>
|
||||
% end
|
||||
</option>
|
||||
<inputType id="org.eclipse.cdt.build.core.settings.holder.inType.445650141" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
|
||||
</tool>
|
||||
<tool id="org.eclipse.cdt.build.core.settings.holder.1637357529" name="GNU C" superClass="org.eclipse.cdt.build.core.settings.holder">
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.incpaths.1246337321" name="Include Paths" superClass="org.eclipse.cdt.build.core.settings.holder.incpaths" useByScannerDiscovery="false" valueType="includePath">
|
||||
% for include in includes:
|
||||
% if "toolchain" in include:
|
||||
% continue
|
||||
% end
|
||||
% if include.startswith(user_home_dir):
|
||||
% if "windows" in systype:
|
||||
<listOptionValue builtIn="false" value="${USERPROFILE}{{include.replace(user_home_dir, '')}}"/>
|
||||
% else:
|
||||
<listOptionValue builtIn="false" value="${HOME}{{include.replace(user_home_dir, '')}}"/>
|
||||
% end
|
||||
% else:
|
||||
<listOptionValue builtIn="false" value="{{include}}"/>
|
||||
% end
|
||||
% end
|
||||
</option>
|
||||
<option id="org.eclipse.cdt.build.core.settings.holder.symbols.2122043341" name="Symbols" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" useByScannerDiscovery="false" valueType="definedSymbols">
|
||||
% for define in defines:
|
||||
<listOptionValue builtIn="false" value="{{define}}"/>
|
||||
% end
|
||||
</option>
|
||||
<inputType id="org.eclipse.cdt.build.core.settings.holder.inType.207004812" languageId="org.eclipse.cdt.core.gcc" languageName="GNU C" sourceContentType="org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
|
||||
</tool>
|
||||
</toolChain>
|
||||
</folderInfo>
|
||||
</configuration>
|
||||
</storageModule>
|
||||
<storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
|
||||
</cconfiguration>
|
||||
</storageModule>
|
||||
<storageModule moduleId="cdtBuildSystem" version="4.0.0">
|
||||
<project id="{{project_name}}.null.189551033" name="{{project_name}}"/>
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
|
||||
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
|
||||
<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
|
||||
<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="piodebuggdb"/>
|
||||
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
|
||||
<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=".pioinit"/>
|
||||
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
|
||||
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
|
||||
<stringAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE_MODE" value="UseSoftTrace"/>
|
||||
<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
|
||||
<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
|
||||
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
|
||||
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
|
||||
<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="1"/>
|
||||
<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
|
||||
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
|
||||
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_REGISTER_GROUPS" value=""/>
|
||||
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
|
||||
<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
|
||||
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value=""/>
|
||||
<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="{{prog_path}}"/>
|
||||
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="{{project_name}}"/>
|
||||
<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="false"/>
|
||||
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value="0.910961921.1363900502"/>
|
||||
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
|
||||
<listEntry value="/{{project_name}}"/>
|
||||
</listAttribute>
|
||||
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
|
||||
<listEntry value="4"/>
|
||||
</listAttribute>
|
||||
<listAttribute key="org.eclipse.debug.ui.favoriteGroups">
|
||||
<listEntry value="org.eclipse.debug.ui.launchGroup.debug"/>
|
||||
</listAttribute>
|
||||
<stringAttribute key="org.eclipse.dsf.launch.MEMORY_BLOCKS" value="<?xml version="1.0" encoding="UTF-8" standalone="no"?> <memoryBlockExpressionList context="reserved-for-future-use"/> "/>
|
||||
<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
|
||||
<stringAttribute key="saved_expressions<seperator>Unknown" value="0x55f4"/>
|
||||
</launchConfiguration>
|
||||
@@ -15,4 +15,19 @@
|
||||
</provider>
|
||||
</extension>
|
||||
</configuration>
|
||||
<configuration id="0.910961921.1363900502" name="Debug">
|
||||
<extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
|
||||
<provider copy-of="extension" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider"/>
|
||||
<provider-reference id="org.eclipse.cdt.core.ReferencedProjectsLanguageSettingsProvider" ref="shared-provider"/>
|
||||
<provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
|
||||
% if "windows" in systype:
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="1291887707783033084" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${USERPROFILE}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
% else:
|
||||
<provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-869785120007741010" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="${HOME}{{cxx_path.replace(user_home_dir, '')}} ${FLAGS} -E -P -v -dD "${INPUTS}"" prefer-non-shared="true">
|
||||
% end
|
||||
<language-scope id="org.eclipse.cdt.core.gcc"/>
|
||||
<language-scope id="org.eclipse.cdt.core.g++"/>
|
||||
</provider>
|
||||
</extension>
|
||||
</configuration>
|
||||
</project>
|
||||
|
||||
@@ -3,4 +3,9 @@ environment/project/0.910961921/PATH/delimiter={{env_pathsep.replace(":", "\\:")
|
||||
environment/project/0.910961921/PATH/operation=replace
|
||||
environment/project/0.910961921/PATH/value={{env_path.replace(":", "\\:")}}
|
||||
environment/project/0.910961921/append=true
|
||||
environment/project/0.910961921/appendContributed=true
|
||||
environment/project/0.910961921/appendContributed=true
|
||||
environment/project/0.910961921.1363900502/PATH/delimiter={{env_pathsep.replace(":", "\\:")}}
|
||||
environment/project/0.910961921.1363900502/PATH/operation=replace
|
||||
environment/project/0.910961921.1363900502/PATH/value={{env_path.replace(":", "\\:")}}
|
||||
environment/project/0.910961921.1363900502/append=true
|
||||
environment/project/0.910961921.1363900502/appendContributed=true
|
||||
@@ -14,7 +14,8 @@ INCLUDEPATH += "{{include}}"
|
||||
% end
|
||||
|
||||
% for define in defines:
|
||||
DEFINES += "{{define}}"
|
||||
% tokens = define.split("##", 1)
|
||||
DEFINES += "{{tokens[0].strip()}}"
|
||||
% end
|
||||
|
||||
OTHER_FILES += platformio.ini
|
||||
|
||||
@@ -93,5 +93,12 @@
|
||||
{
|
||||
"path": "."
|
||||
}
|
||||
]
|
||||
],
|
||||
"settings":
|
||||
{
|
||||
"sublimegdb_workingdir": "{{project_dir}}",
|
||||
"sublimegdb_exec_cmd": "",
|
||||
"sublimegdb_commandline": "{{platformio_path}} -f -c sublimetext debug --interface=gdb --interpreter=mi -x .pioinit"
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
platformio/ide/tpls/vim/.clang_complete.tpl (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
% for include in includes:
|
||||
-I{{include}}
|
||||
% end
|
||||
% for define in defines:
|
||||
-D{{!define}}
|
||||
% end
|
||||
platformio/ide/tpls/vim/.gcc-flags.json.tpl (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"execPath": "{{ cxx_path.replace("\\", "/") }}",
|
||||
"gccDefaultCFlags": "-fsyntax-only {{! cc_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
|
||||
"gccDefaultCppFlags": "-fsyntax-only {{! cxx_flags.replace(' -MMD ', ' ').replace('"', '\\"') }}",
|
||||
"gccErrorLimit": 15,
|
||||
"gccIncludePaths": "{{ ','.join(includes).replace("\\", "/") }}",
|
||||
"gccSuppressWarnings": false
|
||||
}
|
||||
platformio/ide/tpls/vim/.gitignore.tpl (new file, 4 lines)
@@ -0,0 +1,4 @@
|
||||
.pioenvs
|
||||
.piolibdeps
|
||||
.clang_complete
|
||||
.gcc-flags.json
|
||||
@@ -15,7 +15,7 @@
|
||||
</ItemGroup>
|
||||
% for file in src_files:
|
||||
<ItemGroup>
|
||||
% if any([file.endswith(".%s" % e) for e in ("h", "hh", "hpp", "inc")]):
|
||||
% if any(file.endswith(".%s" % e) for e in ("h", "hh", "hpp", "inc")):
|
||||
<ClInclude Include="{{file}}">
|
||||
<Filter>Header Files</Filter>
|
||||
</ClInclude>
|
||||
|
||||
@@ -60,7 +60,7 @@
|
||||
</ItemGroup>
|
||||
% for file in src_files:
|
||||
<ItemGroup>
|
||||
% if any([file.endswith(".%s" % e) for e in ("h", "hh", "hpp", "inc")]):
|
||||
% if any(file.endswith(".%s" % e) for e in ("h", "hh", "hpp", "inc")):
|
||||
<ClInclude Include="{{file}}">
|
||||
<Filter>Header Files</Filter>
|
||||
</ClInclude>
|
||||
|
||||
platformio/ide/tpls/vscode/.gitignore.tpl (new file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
.pioenvs
|
||||
.piolibdeps
|
||||
.vscode/.browse.c_cpp.db*
|
||||
.vscode/c_cpp_properties.json
|
||||
.vscode/launch.json
|
||||
platformio/ide/tpls/vscode/.vscode/c_cpp_properties.json.tpl (vendored, new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
{
|
||||
"!!! WARNING !!!": "PLEASE DO NOT MODIFY THIS FILE! USE http://docs.platformio.org/page/projectconf/section_env_build.html#build-flags",
|
||||
"configurations": [
|
||||
{
|
||||
% import platform
|
||||
% from os.path import commonprefix, dirname
|
||||
%
|
||||
% systype = platform.system().lower()
|
||||
%
|
||||
% cleaned_includes = []
|
||||
% for include in includes:
|
||||
% if "toolchain-" not in dirname(commonprefix([include, cc_path])):
|
||||
% cleaned_includes.append(include)
|
||||
% end
|
||||
% end
|
||||
%
|
||||
% if systype == "windows":
|
||||
"name": "Win32",
|
||||
% elif systype == "darwin":
|
||||
"name": "Mac",
|
||||
"macFrameworkPath": [],
|
||||
% else:
|
||||
"name": "Linux",
|
||||
% end
|
||||
"includePath": [
|
||||
% for include in cleaned_includes:
|
||||
"{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
|
||||
% end
|
||||
""
|
||||
],
|
||||
"browse": {
|
||||
"limitSymbolsToIncludedHeaders": true,
|
||||
"databaseFilename": "${workspaceRoot}/.vscode/.browse.c_cpp.db",
|
||||
"path": [
|
||||
% for include in cleaned_includes:
|
||||
"{{include.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')}}",
|
||||
% end
|
||||
""
|
||||
]
|
||||
},
|
||||
"defines": [
|
||||
% for define in defines:
|
||||
"{{!define.replace('"', '\\"')}}",
|
||||
% end
|
||||
""
|
||||
],
|
||||
"intelliSenseMode": "clang-x64",
|
||||
% import re
|
||||
% STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
|
||||
% cc_stds = STD_RE.findall(cc_flags)
|
||||
% cxx_stds = STD_RE.findall(cxx_flags)
|
||||
%
|
||||
% if cc_stds:
|
||||
"cStandard": "c{{ cc_stds[-1] }}",
|
||||
% end
|
||||
% if cxx_stds:
|
||||
"cppStandard": "c++{{ cxx_stds[-1] }}",
|
||||
% end
|
||||
"compilerPath": "{{ cc_path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"') }}"
|
||||
}
|
||||
]
|
||||
}
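The `cStandard`/`cppStandard` values above come from the last `-std=` flag matched by `STD_RE`. The same pattern, checked in plain Python with made-up flag strings:

import re

STD_RE = re.compile(r"\-std=[a-z\+]+(\d+)")
assert STD_RE.findall("-Os -std=gnu11") == ["11"]          # -> "cStandard": "c11"
assert STD_RE.findall("-fno-rtti -std=gnu++11") == ["11"]  # -> "cppStandard": "c++11"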
platformio/ide/tpls/vscode/.vscode/extensions.json.tpl (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
{
|
||||
// See http://go.microsoft.com/fwlink/?LinkId=827846
|
||||
// for the documentation about the extensions.json format
|
||||
"recommendations": [
|
||||
"platformio.platformio-ide"
|
||||
]
|
||||
}
|
||||
platformio/ide/tpls/vscode/.vscode/launch.json.tpl (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
// AUTOMATICALLY GENERATED FILE. PLEASE DO NOT MODIFY IT MANUALLY
|
||||
|
||||
// PIO Unified Debugger
|
||||
//
|
||||
// Documentation: http://docs.platformio.org/page/plus/debugging.html
|
||||
// Configuration: http://docs.platformio.org/page/projectconf/section_env_debug.html
|
||||
|
||||
% from os.path import dirname, join
|
||||
%
|
||||
% def _escape_path(path):
|
||||
% return path.replace('\\\\', '/').replace('\\', '/').replace('"', '\\"')
|
||||
% end
|
||||
%
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "platformio-debug",
|
||||
"request": "launch",
|
||||
"name": "PIO Debug",
|
||||
"executable": "{{ _escape_path(prog_path) }}",
|
||||
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
|
||||
% if svd_path:
|
||||
"svdPath": "{{ _escape_path(svd_path) }}",
|
||||
% end
|
||||
"preLaunchTask": "PlatformIO: Pre-Debug",
|
||||
"internalConsoleOptions": "openOnSessionStart"
|
||||
},
|
||||
{
|
||||
"type": "platformio-debug",
|
||||
"request": "launch",
|
||||
"name": "PIO Debug (Skip Pre-Debug)",
|
||||
"executable": "{{ _escape_path(prog_path) }}",
|
||||
"toolchainBinDir": "{{ _escape_path(dirname(gdb_path)) }}",
|
||||
% if svd_path:
|
||||
"svdPath": "{{ _escape_path(svd_path) }}",
|
||||
% end
|
||||
"internalConsoleOptions": "openOnSessionStart"
|
||||
}
|
||||
]
|
||||
}
|
||||
platformio/lockfile.py (new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from os import remove
from os.path import abspath, exists, getmtime
from time import sleep, time

from platformio import exception

LOCKFILE_TIMEOUT = 3600  # in seconds, 1 hour
LOCKFILE_DELAY = 0.2

LOCKFILE_INTERFACE_FCNTL = 1
LOCKFILE_INTERFACE_MSVCRT = 2

try:
    import fcntl
    LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_FCNTL
except ImportError:
    try:
        import msvcrt
        LOCKFILE_CURRENT_INTERFACE = LOCKFILE_INTERFACE_MSVCRT
    except ImportError:
        LOCKFILE_CURRENT_INTERFACE = None


class LockFileExists(Exception):
    pass


class LockFile(object):

    def __init__(self, path, timeout=LOCKFILE_TIMEOUT, delay=LOCKFILE_DELAY):
        self.timeout = timeout
        self.delay = delay
        self._lock_path = abspath(path) + ".lock"
        self._fp = None

    def _lock(self):
        if not LOCKFILE_CURRENT_INTERFACE and exists(self._lock_path):
            # remove stale lock
            if time() - getmtime(self._lock_path) > 10:
                try:
                    remove(self._lock_path)
                except:  # pylint: disable=bare-except
                    pass
            else:
                raise LockFileExists

        self._fp = open(self._lock_path, "w")
        try:
            if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
                fcntl.flock(self._fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
            elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
                msvcrt.locking(self._fp.fileno(), msvcrt.LK_NBLCK, 1)
        except IOError:
            self._fp = None
            raise LockFileExists
        return True

    def _unlock(self):
        if not self._fp:
            return
        if LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_FCNTL:
            fcntl.flock(self._fp.fileno(), fcntl.LOCK_UN)
        elif LOCKFILE_CURRENT_INTERFACE == LOCKFILE_INTERFACE_MSVCRT:
            msvcrt.locking(self._fp.fileno(), msvcrt.LK_UNLCK, 1)
        self._fp.close()
        self._fp = None

    def acquire(self):
        elapsed = 0
        while elapsed < self.timeout:
            try:
                return self._lock()
            except LockFileExists:
                sleep(self.delay)
                elapsed += self.delay

        raise exception.LockFileTimeoutError()

    def release(self):
        self._unlock()
        if exists(self._lock_path):
            try:
                remove(self._lock_path)
            except:  # pylint: disable=bare-except
                pass

    def __enter__(self):
        self.acquire()

    def __exit__(self, type_, value, traceback):
        self.release()

    def __del__(self):
        self.release()
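A minimal usage sketch for the new LockFile class (the guarded path is illustrative only, not taken from this change): it is meant to be used as a context manager, polling every LOCKFILE_DELAY seconds for up to LOCKFILE_TIMEOUT and raising exception.LockFileTimeoutError if the lock never frees.

    from platformio.lockfile import LockFile

    # Serialize access to a file shared between PIO Core processes.
    with LockFile("/tmp/pio-demo/appstate.json"):  # illustrative path
        pass  # critical section: read/modify the shared resource here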
@@ -1,4 +1,4 @@
-# Copyright 2014-present PlatformIO <contact@platformio.org>
+# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -29,43 +29,17 @@ from platformio.commands.platform import \
|
||||
platform_uninstall as cmd_platform_uninstall
|
||||
from platformio.commands.platform import platform_update as cmd_platform_update
|
||||
from platformio.commands.upgrade import get_latest_version
|
||||
from platformio.managers.core import update_core_packages
|
||||
from platformio.managers.lib import LibraryManager
|
||||
from platformio.managers.platform import PlatformManager
|
||||
from platformio.pioplus import pioplus_update
|
||||
|
||||
|
||||
def in_silence(ctx=None):
|
||||
ctx = ctx or app.get_session_var("command_ctx")
|
||||
assert ctx
|
||||
ctx_args = ctx.args or []
|
||||
return (ctx_args and
|
||||
(ctx.args[0] == "upgrade" or "--json-output" in ctx_args))
|
||||
|
||||
|
||||
def clean_cache():
|
||||
with app.ContentCache() as cc:
|
||||
cc.clean()
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
|
||||
|
||||
def on_platformio_start(ctx, force, caller):
|
||||
if not caller:
|
||||
if getenv("PLATFORMIO_CALLER"):
|
||||
caller = getenv("PLATFORMIO_CALLER")
|
||||
elif util.is_container():
|
||||
if getenv("C9_UID"):
|
||||
caller = "C9"
|
||||
elif getenv("USER") == "cabox":
|
||||
caller = "CA"
|
||||
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
|
||||
caller = "Che"
|
||||
|
||||
app.set_session_var("command_ctx", ctx)
|
||||
app.set_session_var("force_option", force)
|
||||
app.set_session_var("caller_id", caller)
|
||||
set_caller(caller)
|
||||
telemetry.on_command()
|
||||
|
||||
if ctx.args and (ctx.args[0] == "upgrade" or "update" in ctx.args):
|
||||
clean_cache()
|
||||
if not in_silence(ctx):
|
||||
after_upgrade(ctx)
|
||||
|
||||
@@ -78,7 +52,8 @@ def on_platformio_end(ctx, result): # pylint: disable=W0613
|
||||
check_platformio_upgrade()
|
||||
check_internal_updates(ctx, "platforms")
|
||||
check_internal_updates(ctx, "libraries")
|
||||
except (exception.GetLatestVersionError, exception.APIRequestError):
|
||||
except (exception.InternetIsOffline, exception.GetLatestVersionError,
|
||||
exception.APIRequestError):
|
||||
click.secho(
|
||||
"Failed to check for PlatformIO upgrades. "
|
||||
"Please check your Internet connection.",
|
||||
@@ -89,6 +64,32 @@ def on_platformio_exception(e):
|
||||
telemetry.on_exception(e)
|
||||
|
||||
|
||||
def in_silence(ctx=None):
|
||||
ctx = ctx or app.get_session_var("command_ctx")
|
||||
assert ctx
|
||||
ctx_args = ctx.args or []
|
||||
return ctx_args and any([
|
||||
ctx.args[0] == "upgrade", "--json-output" in ctx_args,
|
||||
"--version" in ctx_args
|
||||
])
|
||||
|
||||
|
||||
def set_caller(caller=None):
|
||||
if not caller:
|
||||
if getenv("PLATFORMIO_CALLER"):
|
||||
caller = getenv("PLATFORMIO_CALLER")
|
||||
elif getenv("VSCODE_PID") or getenv("VSCODE_NLS_CONFIG"):
|
||||
caller = "vscode"
|
||||
elif util.is_container():
|
||||
if getenv("C9_UID"):
|
||||
caller = "C9"
|
||||
elif getenv("USER") == "cabox":
|
||||
caller = "CA"
|
||||
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
|
||||
caller = "Che"
|
||||
app.set_session_var("caller_id", caller)
|
||||
|
||||
|
||||
class Upgrader(object):
|
||||
|
||||
def __init__(self, from_version, to_version):
|
||||
@@ -97,20 +98,22 @@ class Upgrader(object):
|
||||
self.to_version = semantic_version.Version.coerce(
|
||||
util.pepver_to_semver(to_version))
|
||||
|
||||
self._upgraders = [
|
||||
(semantic_version.Version("3.0.0-a1"), self._upgrade_to_3_0_0),
|
||||
(semantic_version.Version("3.0.0-b11"), self._upgrade_to_3_0_0)
|
||||
]
|
||||
self._upgraders = [(semantic_version.Version("3.0.0-a.1"),
|
||||
self._upgrade_to_3_0_0),
|
||||
(semantic_version.Version("3.0.0-b.11"),
|
||||
self._upgrade_to_3_0_0b11),
|
||||
(semantic_version.Version("3.5.0-a.2"),
|
||||
self._update_dev_platforms)]
|
||||
|
||||
def run(self, ctx):
|
||||
if self.from_version > self.to_version:
|
||||
return True
|
||||
|
||||
result = [True]
|
||||
for item in self._upgraders:
|
||||
if self.from_version >= item[0] or self.to_version < item[0]:
|
||||
for version, callback in self._upgraders:
|
||||
if self.from_version >= version or self.to_version < version:
|
||||
continue
|
||||
result.append(item[1](ctx))
|
||||
result.append(callback(ctx))
|
||||
|
||||
return all(result)
|
||||
|
||||
@@ -146,39 +149,55 @@ class Upgrader(object):
|
||||
m['name'] for m in PlatformManager().get_installed()
|
||||
]
|
||||
if "espressif" not in current_platforms:
|
||||
return
|
||||
return True
|
||||
ctx.invoke(cmd_platform_install, platforms=["espressif8266"])
|
||||
ctx.invoke(cmd_platform_uninstall, platforms=["espressif"])
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _update_dev_platforms(ctx):
|
||||
ctx.invoke(cmd_platform_update)
|
||||
return True
|
||||
|
||||
|
||||
def after_upgrade(ctx):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
last_version = app.get_state_item("last_version", "0.0.0")
|
||||
if last_version == __version__:
|
||||
return
|
||||
|
||||
if last_version == "0.0.0":
|
||||
app.set_state_item("last_version", __version__)
|
||||
elif semantic_version.Version.coerce(util.pepver_to_semver(
|
||||
last_version)) > semantic_version.Version.coerce(
|
||||
util.pepver_to_semver(__version__)):
|
||||
click.secho("*" * terminal_width, fg="yellow")
|
||||
click.secho(
|
||||
"Obsolete PIO Core v%s is used (previous was %s)" % (__version__,
|
||||
last_version),
|
||||
fg="yellow")
|
||||
click.secho(
|
||||
"Please remove multiple PIO Cores from a system:", fg="yellow")
|
||||
click.secho(
|
||||
"http://docs.platformio.org/page/faq.html"
|
||||
"#multiple-pio-cores-in-a-system",
|
||||
fg="cyan")
|
||||
click.secho("*" * terminal_width, fg="yellow")
|
||||
return
|
||||
else:
|
||||
click.secho("Please wait while upgrading PlatformIO ...", fg="yellow")
|
||||
clean_cache()
|
||||
click.secho("Please wait while upgrading PlatformIO...", fg="yellow")
|
||||
app.clean_cache()
|
||||
|
||||
# Update PlatformIO's Core packages
|
||||
update_core_packages(silent=True)
|
||||
|
||||
u = Upgrader(last_version, __version__)
|
||||
if u.run(ctx):
|
||||
app.set_state_item("last_version", __version__)
|
||||
|
||||
# update development platforms
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
# pm.update(manifest['name'], "^" + manifest['version'])
|
||||
pm.update(manifest['name'])
|
||||
|
||||
# update PlatformIO Plus tool if installed
|
||||
pioplus_update()
|
||||
|
||||
click.secho(
|
||||
"PlatformIO has been successfully upgraded to %s!\n" %
|
||||
__version__,
|
||||
fg="green")
|
||||
|
||||
telemetry.on_event(
|
||||
category="Auto",
|
||||
action="Upgrade",
|
||||
@@ -188,22 +207,22 @@ def after_upgrade(ctx):
|
||||
click.echo("")
|
||||
|
||||
# PlatformIO banner
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.echo("*" * terminal_width)
|
||||
click.echo("If you like %s, please:" % (click.style(
|
||||
"PlatformIO", fg="cyan")))
|
||||
click.echo(
|
||||
"If you like %s, please:" % (click.style("PlatformIO", fg="cyan")))
|
||||
click.echo("- %s us on Twitter to stay up-to-date "
|
||||
"on the latest project news > %s" % (click.style(
|
||||
"follow", fg="cyan"), click.style(
|
||||
"https://twitter.com/PlatformIO_Org", fg="cyan")))
|
||||
click.echo("- %s it on GitHub > %s" % (click.style(
|
||||
"star", fg="cyan"), click.style(
|
||||
"https://github.com/platformio/platformio", fg="cyan")))
|
||||
"on the latest project news > %s" %
|
||||
(click.style("follow", fg="cyan"),
|
||||
click.style("https://twitter.com/PlatformIO_Org", fg="cyan")))
|
||||
click.echo(
|
||||
"- %s it on GitHub > %s" %
|
||||
(click.style("star", fg="cyan"),
|
||||
click.style("https://github.com/platformio/platformio", fg="cyan")))
|
||||
if not getenv("PLATFORMIO_IDE"):
|
||||
click.echo("- %s PlatformIO IDE for IoT development > %s" %
|
||||
(click.style(
|
||||
"try", fg="cyan"), click.style(
|
||||
"http://platformio.org/platformio-ide", fg="cyan")))
|
||||
click.echo(
|
||||
"- %s PlatformIO IDE for IoT development > %s" %
|
||||
(click.style("try", fg="cyan"),
|
||||
click.style("https://platformio.org/platformio-ide", fg="cyan")))
|
||||
if not util.is_ci():
|
||||
click.echo("- %s us with PlatformIO Plus > %s" % (click.style(
|
||||
"support", fg="cyan"), click.style(
|
||||
@@ -222,6 +241,11 @@ def check_platformio_upgrade():
|
||||
last_check['platformio_upgrade'] = int(time())
|
||||
app.set_state_item("last_check", last_check)
|
||||
|
||||
util.internet_on(raise_exception=True)
|
||||
|
||||
# Update PlatformIO's Core packages
|
||||
update_core_packages(silent=True)
|
||||
|
||||
latest_version = get_latest_version()
|
||||
if semantic_version.Version.coerce(util.pepver_to_semver(
|
||||
latest_version)) <= semantic_version.Version.coerce(
|
||||
@@ -264,11 +288,19 @@ def check_internal_updates(ctx, what):
|
||||
last_check[what + '_update'] = int(time())
|
||||
app.set_state_item("last_check", last_check)
|
||||
|
||||
util.internet_on(raise_exception=True)
|
||||
|
||||
pm = PlatformManager() if what == "platforms" else LibraryManager()
|
||||
outdated_items = []
|
||||
for manifest in pm.get_installed():
|
||||
if manifest['name'] not in outdated_items and \
|
||||
pm.is_outdated(manifest['name']):
|
||||
if manifest['name'] in outdated_items:
|
||||
continue
|
||||
conds = [
|
||||
pm.outdated(manifest['__pkg_dir']), what == "platforms"
|
||||
and PlatformFactory.newPlatform(
|
||||
manifest['__pkg_dir']).are_outdated_packages()
|
||||
]
|
||||
if any(conds):
|
||||
outdated_items.append(manifest['name'])
|
||||
|
||||
if not outdated_items:
|
||||
@@ -279,15 +311,15 @@ def check_internal_updates(ctx, what):
|
||||
click.echo("")
|
||||
click.echo("*" * terminal_width)
|
||||
click.secho(
|
||||
"There are the new updates for %s (%s)" %
|
||||
(what, ", ".join(outdated_items)),
|
||||
"There are the new updates for %s (%s)" % (what,
|
||||
", ".join(outdated_items)),
|
||||
fg="yellow")
|
||||
|
||||
if not app.get_setting("auto_update_" + what):
|
||||
click.secho("Please update them via ", fg="yellow", nl=False)
|
||||
click.secho(
|
||||
"`platformio %s update`" %
|
||||
("lib --global" if what == "libraries" else "platform"),
|
||||
"`platformio %s update`" % ("lib --global" if what == "libraries"
|
||||
else "platform"),
|
||||
fg="cyan",
|
||||
nl=False)
|
||||
click.secho(" command.\n", fg="yellow")
|
||||
|
||||
@@ -1,4 +1,4 @@
-# Copyright 2014-present PlatformIO <contact@platformio.org>
+# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.

137  platformio/managers/core.py  Normal file
@@ -0,0 +1,137 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import subprocess
import sys
from os.path import dirname, join

from platformio import __version__, exception, util
from platformio.managers.package import PackageManager

CORE_PACKAGES = {
    "contrib-piohome": "^1.0.2",
    "contrib-pysite": ">=0.3.2,<2",
    "tool-pioplus": "^1.4.5",
    "tool-unity": "~1.20403.0",
    "tool-scons": "~2.20501.4"
}

PIOPLUS_AUTO_UPDATES_MAX = 100

# pylint: disable=arguments-differ


class CorePackageManager(PackageManager):

    def __init__(self):
        PackageManager.__init__(self, join(util.get_home_dir(), "packages"), [
            "https://dl.bintray.com/platformio/dl-packages/manifest.json",
            "http%s://dl.platformio.org/packages/manifest.json" %
            ("" if sys.version_info < (2, 7, 9) else "s")
        ])

    def install(  # pylint: disable=keyword-arg-before-vararg
            self,
            name,
            requirements=None,
            *args,
            **kwargs):
        PackageManager.install(self, name, requirements, *args, **kwargs)
        self.cleanup_packages()
        return self.get_package_dir(name, requirements)

    def update(self, *args, **kwargs):
        result = PackageManager.update(self, *args, **kwargs)
        self.cleanup_packages()
        return result

    def cleanup_packages(self):
        self.cache_reset()
        best_pkg_versions = {}
        for name, requirements in CORE_PACKAGES.items():
            pkg_dir = self.get_package_dir(name, requirements)
            if not pkg_dir:
                continue
            best_pkg_versions[name] = self.load_manifest(pkg_dir)['version']
        for manifest in self.get_installed():
            if manifest['name'] not in best_pkg_versions:
                continue
            if manifest['version'] != best_pkg_versions[manifest['name']]:
                self.uninstall(manifest['__pkg_dir'], after_update=True)
        self.cache_reset()
        return True


def get_core_package_dir(name):
    if name not in CORE_PACKAGES:
        raise exception.PlatformioException("Please upgrade PIO Core")
    requirements = CORE_PACKAGES[name]
    pm = CorePackageManager()
    pkg_dir = pm.get_package_dir(name, requirements)
    if pkg_dir:
        return pkg_dir
    return pm.install(name, requirements)


def update_core_packages(only_check=False, silent=False):
    pm = CorePackageManager()
    for name, requirements in CORE_PACKAGES.items():
        pkg_dir = pm.get_package_dir(name)
        if not pkg_dir:
            continue
        if not silent or pm.outdated(pkg_dir, requirements):
            pm.update(name, requirements, only_check=only_check)
    return True


def pioplus_call(args, **kwargs):
    if "windows" in util.get_systype() and sys.version_info < (2, 7, 6):
        raise exception.PlatformioException(
            "PlatformIO Core Plus v%s does not run under Python version %s.\n"
            "Minimum supported version is 2.7.6, please upgrade Python.\n"
            "Python 3 is not yet supported.\n" % (__version__, sys.version))

    pioplus_path = join(get_core_package_dir("tool-pioplus"), "pioplus")
    pythonexe_path = util.get_pythonexe_path()
    os.environ['PYTHONEXEPATH'] = pythonexe_path
    os.environ['PYTHONPYSITEDIR'] = get_core_package_dir("contrib-pysite")
    os.environ['PIOCOREPYSITEDIR'] = dirname(util.get_source_dir() or "")
    os.environ['PATH'] = (os.pathsep).join(
        [dirname(pythonexe_path), os.environ['PATH']])
    util.copy_pythonpath_to_osenv()
    code = subprocess.call([pioplus_path] + args, **kwargs)

    # handle remote update request
    if code == 13:
        count_attr = "_update_count"
        try:
            count_value = getattr(pioplus_call, count_attr)
        except AttributeError:
            count_value = 0
            setattr(pioplus_call, count_attr, 1)
        count_value += 1
        setattr(pioplus_call, count_attr, count_value)
        if count_value < PIOPLUS_AUTO_UPDATES_MAX:
            update_core_packages()
            return pioplus_call(args, **kwargs)

    # handle reload request
    elif code == 14:
        return pioplus_call(args, **kwargs)

    if code != 0:
        raise exception.ReturnErrorCode(1)

    return True
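A hedged usage sketch for the new core-package helpers ("tool-scons" comes from CORE_PACKAGES above; the printed path is only indicative): get_core_package_dir() resolves the directory of a core package and installs it on first use, while update_core_packages(only_check=True) performs a report-only update pass.

    from platformio.managers.core import get_core_package_dir, update_core_packages

    # Resolve (installing on demand) the bundled SCons tool, then check core packages for updates.
    scons_dir = get_core_package_dir("tool-scons")
    print(scons_dir)  # e.g. <home_dir>/packages/tool-scons
    update_core_packages(only_check=True)  # report-only, no changes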
@@ -1,4 +1,4 @@
-# Copyright 2014-present PlatformIO <contact@platformio.org>
+# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,17 +13,18 @@
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-locals, too-many-branches
|
||||
# pylint: disable=too-many-return-statements
|
||||
|
||||
import json
|
||||
import os
|
||||
from hashlib import md5
|
||||
from os.path import dirname, join
|
||||
import re
|
||||
from glob import glob
|
||||
from os.path import isdir, join
|
||||
|
||||
import click
|
||||
import semantic_version
|
||||
|
||||
from platformio import app, commands, exception, util
|
||||
from platformio.managers.package import BasePkgManager
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
|
||||
|
||||
class LibraryManager(BasePkgManager):
|
||||
@@ -34,70 +35,97 @@ class LibraryManager(BasePkgManager):
|
||||
BasePkgManager.__init__(self, package_dir)
|
||||
|
||||
@property
|
||||
def manifest_name(self):
|
||||
return ".library.json"
|
||||
def manifest_names(self):
|
||||
return [
|
||||
".library.json", "library.json", "library.properties",
|
||||
"module.json"
|
||||
]
|
||||
|
||||
def check_pkg_structure(self, pkg_dir):
|
||||
try:
|
||||
return BasePkgManager.check_pkg_structure(self, pkg_dir)
|
||||
except exception.MissingPackageManifest:
|
||||
# we will generate manifest automatically
|
||||
pass
|
||||
def get_manifest_path(self, pkg_dir):
|
||||
path = BasePkgManager.get_manifest_path(self, pkg_dir)
|
||||
if path:
|
||||
return path
|
||||
|
||||
manifest = {
|
||||
"name": "Library_" + md5(pkg_dir).hexdigest()[:5],
|
||||
"version": "0.0.0"
|
||||
}
|
||||
manifest_path = self._find_any_manifest(pkg_dir)
|
||||
if manifest_path:
|
||||
_manifest = self._parse_manifest(manifest_path)
|
||||
pkg_dir = dirname(manifest_path)
|
||||
for key in ("name", "version"):
|
||||
if key not in _manifest:
|
||||
_manifest[key] = manifest[key]
|
||||
manifest = _manifest
|
||||
else:
|
||||
for root, dirs, files in os.walk(pkg_dir):
|
||||
if len(dirs) == 1 and not files:
|
||||
manifest['name'] = dirs[0]
|
||||
continue
|
||||
if dirs or files:
|
||||
pkg_dir = root
|
||||
break
|
||||
# if library without manifest, returns first source file
|
||||
src_dir = join(util.glob_escape(pkg_dir))
|
||||
if isdir(join(pkg_dir, "src")):
|
||||
src_dir = join(src_dir, "src")
|
||||
chs_files = glob(join(src_dir, "*.[chS]"))
|
||||
if chs_files:
|
||||
return chs_files[0]
|
||||
cpp_files = glob(join(src_dir, "*.cpp"))
|
||||
if cpp_files:
|
||||
return cpp_files[0]
|
||||
|
||||
with open(join(pkg_dir, self.manifest_name), "w") as fp:
|
||||
json.dump(manifest, fp)
|
||||
|
||||
return pkg_dir
|
||||
|
||||
@staticmethod
|
||||
def _find_any_manifest(pkg_dir):
|
||||
manifests = ("library.json", "library.properties", "module.json")
|
||||
for root, _, files in os.walk(pkg_dir):
|
||||
for manifest in manifests:
|
||||
if manifest in files:
|
||||
return join(root, manifest)
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _parse_manifest(path):
|
||||
manifest = {}
|
||||
if path.endswith(".json"):
|
||||
return util.load_json(path)
|
||||
elif path.endswith("library.properties"):
|
||||
with open(path) as fp:
|
||||
for line in fp.readlines():
|
||||
if "=" not in line:
|
||||
continue
|
||||
key, value = line.split("=", 1)
|
||||
manifest[key.strip()] = value.strip()
|
||||
def load_manifest(self, pkg_dir):
|
||||
manifest = BasePkgManager.load_manifest(self, pkg_dir)
|
||||
if not manifest:
|
||||
return manifest
|
||||
|
||||
# if Arduino library.properties
|
||||
if "sentence" in manifest:
|
||||
manifest['frameworks'] = ["arduino"]
|
||||
if "author" in manifest:
|
||||
manifest['description'] = manifest['sentence']
|
||||
del manifest['sentence']
|
||||
|
||||
if "author" in manifest:
|
||||
if isinstance(manifest['author'], dict):
|
||||
manifest['authors'] = [manifest['author']]
|
||||
else:
|
||||
manifest['authors'] = [{"name": manifest['author']}]
|
||||
del manifest['author']
|
||||
if "sentence" in manifest:
|
||||
manifest['description'] = manifest['sentence']
|
||||
del manifest['sentence']
|
||||
del manifest['author']
|
||||
|
||||
if "authors" in manifest and not isinstance(manifest['authors'], list):
|
||||
manifest['authors'] = [manifest['authors']]
|
||||
|
||||
if "keywords" not in manifest:
|
||||
keywords = []
|
||||
for keyword in re.split(r"[\s/]+",
|
||||
manifest.get("category", "Uncategorized")):
|
||||
keyword = keyword.strip()
|
||||
if not keyword:
|
||||
continue
|
||||
keywords.append(keyword.lower())
|
||||
manifest['keywords'] = keywords
|
||||
if "category" in manifest:
|
||||
del manifest['category']
|
||||
|
||||
# don't replace VCS URL
|
||||
if "url" in manifest and "description" in manifest:
|
||||
manifest['homepage'] = manifest['url']
|
||||
del manifest['url']
|
||||
|
||||
if "architectures" in manifest:
|
||||
platforms = []
|
||||
platforms_map = {
|
||||
"avr": "atmelavr",
|
||||
"sam": "atmelsam",
|
||||
"samd": "atmelsam",
|
||||
"esp8266": "espressif8266",
|
||||
"esp32": "espressif32",
|
||||
"arc32": "intel_arc32"
|
||||
}
|
||||
for arch in manifest['architectures'].split(","):
|
||||
arch = arch.strip()
|
||||
if arch == "*":
|
||||
platforms = "*"
|
||||
break
|
||||
if arch in platforms_map:
|
||||
platforms.append(platforms_map[arch])
|
||||
manifest['platforms'] = platforms
|
||||
del manifest['architectures']
|
||||
|
||||
# convert listed items via comma to array
|
||||
for key in ("keywords", "frameworks", "platforms"):
|
||||
if key not in manifest or \
|
||||
not isinstance(manifest[key], basestring):
|
||||
continue
|
||||
manifest[key] = [
|
||||
i.strip() for i in manifest[key].split(",") if i.strip()
|
||||
]
|
||||
|
||||
return manifest
|
||||
|
||||
@staticmethod
|
||||
@@ -125,78 +153,51 @@ class LibraryManager(BasePkgManager):
|
||||
]
|
||||
return items
|
||||
|
||||
@staticmethod
|
||||
def max_satisfying_repo_version(versions, requirements=None):
|
||||
def max_satisfying_repo_version(self, versions, requirements=None):
|
||||
|
||||
def _cmp_dates(datestr1, datestr2):
|
||||
from datetime import datetime
|
||||
assert "T" in datestr1 and "T" in datestr2
|
||||
dateformat = "%Y-%m-%d %H:%M:%S"
|
||||
date1 = datetime.strptime(datestr1[:-1].replace("T", " "),
|
||||
dateformat)
|
||||
date2 = datetime.strptime(datestr2[:-1].replace("T", " "),
|
||||
dateformat)
|
||||
date1 = util.parse_date(datestr1)
|
||||
date2 = util.parse_date(datestr2)
|
||||
if date1 == date2:
|
||||
return 0
|
||||
return -1 if date1 < date2 else 1
|
||||
|
||||
semver_spec = self.parse_semver_spec(
|
||||
requirements) if requirements else None
|
||||
item = None
|
||||
reqspec = None
|
||||
if requirements:
|
||||
try:
|
||||
reqspec = semantic_version.Spec(requirements)
|
||||
except ValueError:
|
||||
pass
|
||||
for v in versions:
|
||||
specver = None
|
||||
try:
|
||||
specver = semantic_version.Version(v['version'], partial=True)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
if reqspec:
|
||||
if not specver or specver not in reqspec:
|
||||
for v in versions:
|
||||
semver_new = self.parse_semver_version(v['name'])
|
||||
if semver_spec:
|
||||
if not semver_new or semver_new not in semver_spec:
|
||||
continue
|
||||
if not item or semantic_version.Version(
|
||||
item['version'], partial=True) < specver:
|
||||
if not item or self.parse_semver_version(
|
||||
item['name']) < semver_new:
|
||||
item = v
|
||||
elif requirements:
|
||||
if requirements == v['version']:
|
||||
if requirements == v['name']:
|
||||
return v
|
||||
|
||||
else:
|
||||
if not item or _cmp_dates(item['date'], v['date']) == -1:
|
||||
if not item or _cmp_dates(item['released'],
|
||||
v['released']) == -1:
|
||||
item = v
|
||||
return item
|
||||
|
||||
def get_latest_repo_version(self, name, requirements):
|
||||
def get_latest_repo_version(self, name, requirements, silent=False):
|
||||
item = self.max_satisfying_repo_version(
|
||||
util.get_api_result(
|
||||
"/lib/versions/%d" % self._get_pkg_id_by_name(name,
|
||||
requirements),
|
||||
cache_valid="1h"),
|
||||
requirements)
|
||||
return item['version'] if item else None
|
||||
|
||||
def _get_pkg_id_by_name(self,
|
||||
name,
|
||||
requirements,
|
||||
silent=False,
|
||||
interactive=False):
|
||||
if name.startswith("id="):
|
||||
return int(name[3:])
|
||||
# try to find ID from installed packages
|
||||
package_dir = self.get_package_dir(name, requirements)
|
||||
if package_dir:
|
||||
manifest = self.load_manifest(package_dir)
|
||||
if "id" in manifest:
|
||||
return int(manifest['id'])
|
||||
return int(
|
||||
self.search_for_library({
|
||||
"name": name
|
||||
}, silent, interactive)['id'])
|
||||
"/lib/info/%d" % self.search_lib_id(
|
||||
{
|
||||
"name": name,
|
||||
"requirements": requirements
|
||||
},
|
||||
silent=silent),
|
||||
cache_valid="1h")['versions'], requirements)
|
||||
return item['name'] if item else None
|
||||
|
||||
def _install_from_piorepo(self, name, requirements):
|
||||
assert name.startswith("id=")
|
||||
assert name.startswith("id="), name
|
||||
version = self.get_latest_repo_version(name, requirements)
|
||||
if not version:
|
||||
raise exception.UndefinedPackageVersion(requirements or "latest",
|
||||
@@ -211,74 +212,20 @@ class LibraryManager(BasePkgManager):
|
||||
name, dl_data['url'].replace("http://", "https://")
|
||||
if app.get_setting("enable_ssl") else dl_data['url'], requirements)
|
||||
|
||||
def install(self,
|
||||
name,
|
||||
requirements=None,
|
||||
silent=False,
|
||||
trigger_event=True,
|
||||
interactive=False):
|
||||
already_installed = False
|
||||
_name, _requirements, _url = self.parse_pkg_name(name, requirements)
|
||||
|
||||
try:
|
||||
if not _url:
|
||||
_name = "id=%d" % self._get_pkg_id_by_name(
|
||||
_name,
|
||||
_requirements,
|
||||
silent=silent,
|
||||
interactive=interactive)
|
||||
already_installed = self.get_package(_name, _requirements, _url)
|
||||
pkg_dir = BasePkgManager.install(
|
||||
self, _name
|
||||
if not _url else name, _requirements, silent, trigger_event)
|
||||
except exception.InternetIsOffline as e:
|
||||
if not silent:
|
||||
click.secho(str(e), fg="yellow")
|
||||
return
|
||||
|
||||
if already_installed:
|
||||
return
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
if "dependencies" not in manifest:
|
||||
return pkg_dir
|
||||
|
||||
if not silent:
|
||||
click.secho("Installing dependencies", fg="yellow")
|
||||
|
||||
for filters in self.normalize_dependencies(manifest['dependencies']):
|
||||
assert "name" in filters
|
||||
if any([s in filters.get("version", "") for s in ("\\", "/")]):
|
||||
self.install("{name}={version}".format(**filters))
|
||||
else:
|
||||
try:
|
||||
lib_info = self.search_for_library(filters, silent,
|
||||
interactive)
|
||||
except exception.LibNotFound as e:
|
||||
if not silent:
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
continue
|
||||
|
||||
if filters.get("version"):
|
||||
self.install(
|
||||
lib_info['id'],
|
||||
requirements=filters.get("version"),
|
||||
silent=silent,
|
||||
trigger_event=trigger_event)
|
||||
else:
|
||||
self.install(
|
||||
lib_info['id'],
|
||||
silent=silent,
|
||||
trigger_event=trigger_event)
|
||||
return pkg_dir
|
||||
|
||||
@staticmethod
|
||||
def search_for_library( # pylint: disable=too-many-branches
|
||||
def search_lib_id( # pylint: disable=too-many-branches
|
||||
self,
|
||||
filters,
|
||||
silent=False,
|
||||
interactive=False):
|
||||
assert isinstance(filters, dict)
|
||||
assert "name" in filters
|
||||
|
||||
# try to find ID within installed packages
|
||||
lib_id = self._get_lib_id_from_installed(filters)
|
||||
if lib_id:
|
||||
return lib_id
|
||||
|
||||
# looking in PIO Library Registry
|
||||
if not silent:
|
||||
click.echo("Looking for %s library in registry" % click.style(
|
||||
filters['name'], fg="cyan"))
|
||||
@@ -290,39 +237,43 @@ class LibraryManager(BasePkgManager):
|
||||
if not isinstance(values, list):
|
||||
values = [v.strip() for v in values.split(",") if v]
|
||||
for value in values:
|
||||
query.append('%s:"%s"' % (key[:-1] if key.endswith("s") else
|
||||
key, value))
|
||||
query.append(
|
||||
'%s:"%s"' % (key[:-1]
|
||||
if key.endswith("s") else key, value))
|
||||
|
||||
lib_info = None
|
||||
result = util.get_api_result(
|
||||
"/lib/search", dict(query=" ".join(query)), cache_valid="3d")
|
||||
"/v2/lib/search", dict(query=" ".join(query)), cache_valid="1h")
|
||||
if result['total'] == 1:
|
||||
lib_info = result['items'][0]
|
||||
elif result['total'] > 1:
|
||||
click.secho(
|
||||
"Conflict: More than one library has been found "
|
||||
"by request %s:" % json.dumps(filters),
|
||||
fg="red",
|
||||
err=True)
|
||||
commands.lib.echo_liblist_header()
|
||||
for item in result['items']:
|
||||
commands.lib.echo_liblist_item(item)
|
||||
|
||||
if not interactive:
|
||||
click.secho(
|
||||
"Automatically chose the first available library "
|
||||
"(use `--interactive` option to make a choice)",
|
||||
fg="yellow",
|
||||
err=True)
|
||||
if silent and not interactive:
|
||||
lib_info = result['items'][0]
|
||||
else:
|
||||
deplib_id = click.prompt(
|
||||
"Please choose library ID",
|
||||
type=click.Choice([str(i['id']) for i in result['items']]))
|
||||
click.secho(
|
||||
"Conflict: More than one library has been found "
|
||||
"by request %s:" % json.dumps(filters),
|
||||
fg="yellow",
|
||||
err=True)
|
||||
for item in result['items']:
|
||||
if item['id'] == int(deplib_id):
|
||||
lib_info = item
|
||||
break
|
||||
commands.lib.print_lib_item(item)
|
||||
|
||||
if not interactive:
|
||||
click.secho(
|
||||
"Automatically chose the first available library "
|
||||
"(use `--interactive` option to make a choice)",
|
||||
fg="yellow",
|
||||
err=True)
|
||||
lib_info = result['items'][0]
|
||||
else:
|
||||
deplib_id = click.prompt(
|
||||
"Please choose library ID",
|
||||
type=click.Choice(
|
||||
[str(i['id']) for i in result['items']]))
|
||||
for item in result['items']:
|
||||
if item['id'] == int(deplib_id):
|
||||
lib_info = item
|
||||
break
|
||||
|
||||
if not lib_info:
|
||||
if filters.keys() == ["name"]:
|
||||
@@ -331,7 +282,153 @@ class LibraryManager(BasePkgManager):
|
||||
raise exception.LibNotFound(str(filters))
|
||||
if not silent:
|
||||
click.echo("Found: %s" % click.style(
|
||||
"http://platformio.org/lib/show/{id}/{name}".format(
|
||||
"https://platformio.org/lib/show/{id}/{name}".format(
|
||||
**lib_info),
|
||||
fg="blue"))
|
||||
return lib_info
|
||||
return int(lib_info['id'])
|
||||
|
||||
def _get_lib_id_from_installed(self, filters):
|
||||
if filters['name'].startswith("id="):
|
||||
return int(filters['name'][3:])
|
||||
package_dir = self.get_package_dir(
|
||||
filters['name'], filters.get("requirements",
|
||||
filters.get("version")))
|
||||
if not package_dir:
|
||||
return None
|
||||
manifest = self.load_manifest(package_dir)
|
||||
if "id" not in manifest:
|
||||
return None
|
||||
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in filters:
|
||||
continue
|
||||
if key not in manifest:
|
||||
return None
|
||||
if not util.items_in_list(
|
||||
util.items_to_list(filters[key]),
|
||||
util.items_to_list(manifest[key])):
|
||||
return None
|
||||
|
||||
if "authors" in filters:
|
||||
if "authors" not in manifest:
|
||||
return None
|
||||
manifest_authors = manifest['authors']
|
||||
if not isinstance(manifest_authors, list):
|
||||
manifest_authors = [manifest_authors]
|
||||
manifest_authors = [
|
||||
a['name'] for a in manifest_authors
|
||||
if isinstance(a, dict) and "name" in a
|
||||
]
|
||||
filter_authors = filters['authors']
|
||||
if not isinstance(filter_authors, list):
|
||||
filter_authors = [filter_authors]
|
||||
if not set(filter_authors) <= set(manifest_authors):
|
||||
return None
|
||||
|
||||
return int(manifest['id'])
|
||||
|
||||
def install( # pylint: disable=arguments-differ
|
||||
self,
|
||||
name,
|
||||
requirements=None,
|
||||
silent=False,
|
||||
after_update=False,
|
||||
interactive=False,
|
||||
force=False):
|
||||
_name, _requirements, _url = self.parse_pkg_uri(name, requirements)
|
||||
if not _url:
|
||||
name = "id=%d" % self.search_lib_id(
|
||||
{
|
||||
"name": _name,
|
||||
"requirements": _requirements
|
||||
},
|
||||
silent=silent,
|
||||
interactive=interactive)
|
||||
requirements = _requirements
|
||||
pkg_dir = BasePkgManager.install(
|
||||
self,
|
||||
name,
|
||||
requirements,
|
||||
silent=silent,
|
||||
after_update=after_update,
|
||||
force=force)
|
||||
|
||||
if not pkg_dir:
|
||||
return None
|
||||
|
||||
manifest = self.load_manifest(pkg_dir)
|
||||
if "dependencies" not in manifest:
|
||||
return pkg_dir
|
||||
|
||||
if not silent:
|
||||
click.secho("Installing dependencies", fg="yellow")
|
||||
|
||||
for filters in self.normalize_dependencies(manifest['dependencies']):
|
||||
assert "name" in filters
|
||||
|
||||
# avoid circle dependencies
|
||||
if not self.INSTALL_HISTORY:
|
||||
self.INSTALL_HISTORY = []
|
||||
history_key = str(filters)
|
||||
if history_key in self.INSTALL_HISTORY:
|
||||
continue
|
||||
self.INSTALL_HISTORY.append(history_key)
|
||||
|
||||
if any(s in filters.get("version", "") for s in ("\\", "/")):
|
||||
self.install(
|
||||
"{name}={version}".format(**filters),
|
||||
silent=silent,
|
||||
after_update=after_update,
|
||||
interactive=interactive,
|
||||
force=force)
|
||||
else:
|
||||
try:
|
||||
lib_id = self.search_lib_id(filters, silent, interactive)
|
||||
except exception.LibNotFound as e:
|
||||
if not silent or is_builtin_lib(filters['name']):
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
continue
|
||||
|
||||
if filters.get("version"):
|
||||
self.install(
|
||||
lib_id,
|
||||
filters.get("version"),
|
||||
silent=silent,
|
||||
after_update=after_update,
|
||||
interactive=interactive,
|
||||
force=force)
|
||||
else:
|
||||
self.install(
|
||||
lib_id,
|
||||
silent=silent,
|
||||
after_update=after_update,
|
||||
interactive=interactive,
|
||||
force=force)
|
||||
return pkg_dir
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def get_builtin_libs(storage_names=None):
|
||||
items = []
|
||||
storage_names = storage_names or []
|
||||
pm = PlatformManager()
|
||||
for manifest in pm.get_installed():
|
||||
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
|
||||
for storage in p.get_lib_storages():
|
||||
if storage_names and storage['name'] not in storage_names:
|
||||
continue
|
||||
lm = LibraryManager(storage['path'])
|
||||
items.append({
|
||||
"name": storage['name'],
|
||||
"path": storage['path'],
|
||||
"items": lm.get_installed()
|
||||
})
|
||||
return items
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def is_builtin_lib(name):
|
||||
for storage in get_builtin_libs():
|
||||
if any(l.get("name") == name for l in storage['items']):
|
||||
return True
|
||||
return False
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
-# Copyright 2014-present PlatformIO <contact@platformio.org>
+# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -18,16 +18,19 @@ import re
|
||||
from imp import load_source
|
||||
from multiprocessing import cpu_count
|
||||
from os.path import basename, dirname, isdir, isfile, join
|
||||
from urllib import quote
|
||||
|
||||
import click
|
||||
import semantic_version
|
||||
|
||||
from platformio import app, exception, util
|
||||
from platformio import __version__, app, exception, util
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
from platformio.managers.package import BasePkgManager, PackageManager
|
||||
|
||||
|
||||
class PlatformManager(BasePkgManager):
|
||||
|
||||
FILE_CACHE_VALID = None # disable platform caching
|
||||
FILE_CACHE_VALID = None # disable platform download caching
|
||||
|
||||
def __init__(self, package_dir=None, repositories=None):
|
||||
if not repositories:
|
||||
@@ -36,13 +39,22 @@ class PlatformManager(BasePkgManager):
|
||||
"{0}://dl.platformio.org/platforms/manifest.json".format(
|
||||
"https" if app.get_setting("enable_ssl") else "http")
|
||||
]
|
||||
BasePkgManager.__init__(self, package_dir or
|
||||
join(util.get_home_dir(), "platforms"),
|
||||
repositories)
|
||||
BasePkgManager.__init__(
|
||||
self, package_dir or join(util.get_home_dir(), "platforms"),
|
||||
repositories)
|
||||
|
||||
@property
|
||||
def manifest_name(self):
|
||||
return "platform.json"
|
||||
def manifest_names(self):
|
||||
return ["platform.json"]
|
||||
|
||||
def get_manifest_path(self, pkg_dir):
|
||||
if not isdir(pkg_dir):
|
||||
return None
|
||||
for name in self.manifest_names:
|
||||
manifest_path = join(pkg_dir, name)
|
||||
if isfile(manifest_path):
|
||||
return manifest_path
|
||||
return None
|
||||
|
||||
def install(self,
|
||||
name,
|
||||
@@ -50,50 +62,86 @@ class PlatformManager(BasePkgManager):
|
||||
with_packages=None,
|
||||
without_packages=None,
|
||||
skip_default_package=False,
|
||||
**_): # pylint: disable=too-many-arguments
|
||||
platform_dir = BasePkgManager.install(self, name, requirements)
|
||||
p = PlatformFactory.newPlatform(self.get_manifest_path(platform_dir))
|
||||
p.install_packages(with_packages, without_packages,
|
||||
skip_default_package)
|
||||
self.cleanup_packages(p.packages.keys())
|
||||
return True
|
||||
after_update=False,
|
||||
silent=False,
|
||||
force=False,
|
||||
**_): # pylint: disable=too-many-arguments, arguments-differ
|
||||
platform_dir = BasePkgManager.install(
|
||||
self, name, requirements, silent=silent, force=force)
|
||||
p = PlatformFactory.newPlatform(platform_dir)
|
||||
|
||||
def uninstall(self, name, requirements=None, trigger_event=True):
|
||||
name, requirements, _ = self.parse_pkg_name(name, requirements)
|
||||
p = PlatformFactory.newPlatform(name, requirements)
|
||||
BasePkgManager.uninstall(self, name, requirements)
|
||||
# trigger event is disabled when upgrading operation
|
||||
# don't cleanup packages, "install" will do that
|
||||
if trigger_event:
|
||||
self.cleanup_packages(p.packages.keys())
|
||||
return True
|
||||
# don't cleanup packages or install them after update
|
||||
# we check packages for updates in def update()
|
||||
if after_update:
|
||||
return True
|
||||
|
||||
p.install_packages(
|
||||
with_packages,
|
||||
without_packages,
|
||||
skip_default_package,
|
||||
silent=silent,
|
||||
force=force)
|
||||
return self.cleanup_packages(p.packages.keys())
|
||||
|
||||
def uninstall(self, package, requirements=None, after_update=False):
|
||||
if isdir(package):
|
||||
pkg_dir = package
|
||||
else:
|
||||
name, requirements, url = self.parse_pkg_uri(package, requirements)
|
||||
pkg_dir = self.get_package_dir(name, requirements, url)
|
||||
|
||||
if not pkg_dir:
|
||||
raise exception.UnknownPlatform(package)
|
||||
|
||||
p = PlatformFactory.newPlatform(pkg_dir)
|
||||
BasePkgManager.uninstall(self, pkg_dir, requirements)
|
||||
|
||||
# don't cleanup packages or install them after update
|
||||
# we check packages for updates in def update()
|
||||
if after_update:
|
||||
return True
|
||||
|
||||
return self.cleanup_packages(p.packages.keys())
|
||||
|
||||
def update( # pylint: disable=arguments-differ
|
||||
self,
|
||||
name,
|
||||
package,
|
||||
requirements=None,
|
||||
only_packages=False,
|
||||
only_check=False):
|
||||
name, requirements, _ = self.parse_pkg_name(name, requirements)
|
||||
only_check=False,
|
||||
only_packages=False):
|
||||
if isdir(package):
|
||||
pkg_dir = package
|
||||
else:
|
||||
name, requirements, url = self.parse_pkg_uri(package, requirements)
|
||||
pkg_dir = self.get_package_dir(name, requirements, url)
|
||||
|
||||
if not pkg_dir:
|
||||
raise exception.UnknownPlatform(package)
|
||||
|
||||
p = PlatformFactory.newPlatform(pkg_dir)
|
||||
pkgs_before = p.get_installed_packages().keys()
|
||||
|
||||
missed_pkgs = set()
|
||||
if not only_packages:
|
||||
BasePkgManager.update(self, name, requirements, only_check)
|
||||
p = PlatformFactory.newPlatform(name, requirements)
|
||||
BasePkgManager.update(self, pkg_dir, requirements, only_check)
|
||||
p = PlatformFactory.newPlatform(pkg_dir)
|
||||
missed_pkgs = set(pkgs_before) & set(p.packages.keys())
|
||||
missed_pkgs -= set(p.get_installed_packages().keys())
|
||||
|
||||
p.update_packages(only_check)
|
||||
self.cleanup_packages(p.packages.keys())
|
||||
|
||||
if missed_pkgs:
|
||||
p.install_packages(
|
||||
with_packages=list(missed_pkgs), skip_default_package=True)
|
||||
|
||||
return True
|
||||
|
||||
def is_outdated(self, name, requirements=None):
|
||||
if BasePkgManager.is_outdated(self, name, requirements):
|
||||
return True
|
||||
p = PlatformFactory.newPlatform(name, requirements)
|
||||
return p.are_outdated_packages()
|
||||
|
||||
def cleanup_packages(self, names):
|
||||
self.reset_cache()
|
||||
self.cache_reset()
|
||||
deppkgs = {}
|
||||
for manifest in PlatformManager().get_installed():
|
||||
p = PlatformFactory.newPlatform(manifest['name'],
|
||||
manifest['version'])
|
||||
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
|
||||
for pkgname, pkgmanifest in p.get_installed_packages().items():
|
||||
if pkgname not in deppkgs:
|
||||
deppkgs[pkgname] = set()
|
||||
@@ -103,34 +151,52 @@ class PlatformManager(BasePkgManager):
|
||||
for manifest in pm.get_installed():
|
||||
if manifest['name'] not in names:
|
||||
continue
|
||||
if (manifest['name'] not in deppkgs or
|
||||
manifest['version'] not in deppkgs[manifest['name']]):
|
||||
pm.uninstall(
|
||||
manifest['name'], manifest['version'], trigger_event=False)
|
||||
if (manifest['name'] not in deppkgs
|
||||
or manifest['version'] not in deppkgs[manifest['name']]):
|
||||
try:
|
||||
pm.uninstall(manifest['__pkg_dir'], after_update=True)
|
||||
except exception.UnknownPackage:
|
||||
pass
|
||||
|
||||
self.reset_cache()
|
||||
self.cache_reset()
|
||||
return True
|
||||
|
||||
@util.memoized(expire=5000)
|
||||
def get_installed_boards(self):
|
||||
boards = []
|
||||
for manifest in self.get_installed():
|
||||
p = PlatformFactory.newPlatform(
|
||||
self.get_manifest_path(manifest['__pkg_dir']))
|
||||
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
|
||||
for config in p.get_boards().values():
|
||||
boards.append(config.get_brief_data())
|
||||
board = config.get_brief_data()
|
||||
if board not in boards:
|
||||
boards.append(board)
|
||||
return boards
|
||||
|
||||
@staticmethod
|
||||
@util.memoized
|
||||
@util.memoized()
|
||||
def get_registered_boards():
|
||||
return util.get_api_result("/boards", cache_valid="365d")
|
||||
return util.get_api_result("/boards", cache_valid="7d")
|
||||
|
||||
def board_config(self, id_):
|
||||
def get_all_boards(self):
|
||||
boards = self.get_installed_boards()
|
||||
know_boards = ["%s:%s" % (b['platform'], b['id']) for b in boards]
|
||||
try:
|
||||
for board in self.get_registered_boards():
|
||||
key = "%s:%s" % (board['platform'], board['id'])
|
||||
if key not in know_boards:
|
||||
boards.append(board)
|
||||
except (exception.APIRequestError, exception.InternetIsOffline):
|
||||
pass
|
||||
return sorted(boards, key=lambda b: b['name'])
|
||||
|
||||
def board_config(self, id_, platform=None):
|
||||
for manifest in self.get_installed_boards():
|
||||
if manifest['id'] == id_:
|
||||
if manifest['id'] == id_ and (not platform
|
||||
or manifest['platform'] == platform):
|
||||
return manifest
|
||||
for manifest in self.get_registered_boards():
|
||||
if manifest['id'] == id_:
|
||||
if manifest['id'] == id_ and (not platform
|
||||
or manifest['platform'] == platform):
|
||||
return manifest
|
||||
raise exception.UnknownBoard(id_)
|
||||
|
||||
@@ -154,15 +220,19 @@ class PlatformFactory(object):
|
||||
|
||||
@classmethod
|
||||
def newPlatform(cls, name, requirements=None):
|
||||
pm = PlatformManager()
|
||||
platform_dir = None
|
||||
if name.endswith("platform.json") and isfile(name):
|
||||
if isdir(name):
|
||||
platform_dir = name
|
||||
name = pm.load_manifest(platform_dir)['name']
|
||||
elif name.endswith("platform.json") and isfile(name):
|
||||
platform_dir = dirname(name)
|
||||
name = util.load_json(name)['name']
|
||||
else:
|
||||
if not requirements and "@" in name:
|
||||
name, requirements = name.rsplit("@", 1)
|
||||
platform_dir = PlatformManager().get_package_dir(name,
|
||||
requirements)
|
||||
name, requirements, url = pm.parse_pkg_uri(name, requirements)
|
||||
platform_dir = pm.get_package_dir(name, requirements, url)
|
||||
if platform_dir:
|
||||
name = pm.load_manifest(platform_dir)['name']
|
||||
|
||||
if not platform_dir:
|
||||
raise exception.UnknownPlatform(name if not requirements else
|
||||
@@ -184,13 +254,15 @@ class PlatformFactory(object):
|
||||
|
||||
class PlatformPackagesMixin(object):
|
||||
|
||||
def install_packages(self,
|
||||
with_packages=None,
|
||||
without_packages=None,
|
||||
skip_default_package=False,
|
||||
silent=False):
|
||||
with_packages = set(self.pkg_types_to_names(with_packages or []))
|
||||
without_packages = set(self.pkg_types_to_names(without_packages or []))
|
||||
def install_packages( # pylint: disable=too-many-arguments
|
||||
self,
|
||||
with_packages=None,
|
||||
without_packages=None,
|
||||
skip_default_package=False,
|
||||
silent=False,
|
||||
force=False):
|
||||
with_packages = set(self.find_pkg_names(with_packages or []))
|
||||
without_packages = set(self.find_pkg_names(without_packages or []))
|
||||
|
||||
upkgs = with_packages | without_packages
|
||||
ppkgs = set(self.packages.keys())
|
||||
@@ -198,44 +270,76 @@ class PlatformPackagesMixin(object):
|
||||
raise exception.UnknownPackage(", ".join(upkgs - ppkgs))
|
||||
|
||||
for name, opts in self.packages.items():
|
||||
version = opts.get("version", "")
|
||||
if name in without_packages:
|
||||
continue
|
||||
elif (name in with_packages or
|
||||
not (skip_default_package or opts.get("optional", False))):
|
||||
if any([s in opts.get("version", "") for s in ("\\", "/")]):
|
||||
if ":" in version:
|
||||
self.pm.install(
|
||||
"%s=%s" % (name, opts['version']), silent=silent)
|
||||
"%s=%s" % (name, version), silent=silent, force=force)
|
||||
else:
|
||||
self.pm.install(name, opts.get("version"), silent=silent)
|
||||
self.pm.install(name, version, silent=silent, force=force)
|
||||
|
||||
return True
|
||||
|
||||
def get_installed_packages(self):
|
||||
items = {}
|
||||
for name, opts in self.packages.items():
|
||||
package = self.pm.get_package(name, opts['version'])
|
||||
if package:
|
||||
items[name] = package
|
||||
return items
|
||||
def find_pkg_names(self, candidates):
|
||||
result = []
|
||||
for candidate in candidates:
|
||||
found = False
|
||||
|
||||
# lookup by package types
|
||||
for _name, _opts in self.packages.items():
|
||||
if _opts.get("type") == candidate:
|
||||
result.append(_name)
|
||||
found = True
|
||||
|
||||
if (self.frameworks and candidate.startswith("framework-")
|
||||
and candidate[10:] in self.frameworks):
|
||||
result.append(self.frameworks[candidate[10:]]['package'])
|
||||
found = True
|
||||
|
||||
if not found:
|
||||
result.append(candidate)
|
||||
|
||||
return result
|
||||
|
||||
def update_packages(self, only_check=False):
|
||||
for name in self.get_installed_packages():
|
||||
self.pm.update(name, self.packages[name]['version'], only_check)
|
||||
for name, manifest in self.get_installed_packages().items():
|
||||
requirements = self.packages[name].get("version", "")
|
||||
if ":" in requirements:
|
||||
_, requirements, __ = self.pm.parse_pkg_uri(requirements)
|
||||
self.pm.update(manifest['__pkg_dir'], requirements, only_check)
|
||||
|
||||
def get_installed_packages(self):
|
||||
items = {}
|
||||
for name in self.packages:
|
||||
pkg_dir = self.get_package_dir(name)
|
||||
if pkg_dir:
|
||||
items[name] = self.pm.load_manifest(pkg_dir)
|
||||
return items
|
||||
|
||||
def are_outdated_packages(self):
|
||||
for name, opts in self.get_installed_packages().items():
|
||||
if (opts['version'] != self.pm.get_latest_repo_version(
|
||||
name, self.packages[name].get("version"))):
|
||||
for name, manifest in self.get_installed_packages().items():
|
||||
requirements = self.packages[name].get("version", "")
|
||||
if ":" in requirements:
|
||||
_, requirements, __ = self.pm.parse_pkg_uri(requirements)
|
||||
if self.pm.outdated(manifest['__pkg_dir'], requirements):
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_package_dir(self, name):
|
||||
return self.pm.get_package_dir(name,
|
||||
self.packages[name].get("version"))
|
||||
version = self.packages[name].get("version", "")
|
||||
if ":" in version:
|
||||
return self.pm.get_package_dir(
|
||||
*self.pm.parse_pkg_uri("%s=%s" % (name, version)))
|
||||
return self.pm.get_package_dir(name, version)
|
||||
|
||||
def get_package_version(self, name):
|
||||
package = self.pm.get_package(name, self.packages[name].get("version"))
|
||||
return package['version'] if package else None
|
||||
pkg_dir = self.get_package_dir(name)
|
||||
if not pkg_dir:
|
||||
return None
|
||||
return self.pm.load_manifest(pkg_dir).get("version")
|
||||
|
||||
|
||||
class PlatformRunMixin(object):
|
||||
@@ -270,9 +374,10 @@ class PlatformRunMixin(object):
|
||||
def _run_scons(self, variables, targets):
|
||||
cmd = [
|
||||
util.get_pythonexe_path(),
|
||||
join(self.get_package_dir("tool-scons"), "script", "scons"), "-Q",
|
||||
join(get_core_package_dir("tool-scons"), "script", "scons"), "-Q",
|
||||
"-j %d" % self.get_job_nums(), "--warn=no-no-parallel-support",
|
||||
"-f", join(util.get_source_dir(), "builder", "main.py")
|
||||
"-f",
|
||||
join(util.get_source_dir(), "builder", "main.py")
|
||||
]
|
||||
cmd.append("PIOVERBOSE=%d" % (1 if self.verbose else 0))
|
||||
cmd += targets
|
||||
@@ -297,6 +402,12 @@ class PlatformRunMixin(object):
|
||||
is_error = self.LINE_ERROR_RE.search(line) is not None
|
||||
self._echo_line(line, level=3 if is_error else 2)
|
||||
|
||||
a_pos = line.find("fatal error:")
|
||||
b_pos = line.rfind(": No such file or directory")
|
||||
if a_pos == -1 or b_pos == -1:
|
||||
return
|
||||
self._echo_missed_dependency(line[a_pos + 12:b_pos].strip())
|
||||
|
||||
def _echo_line(self, line, level):
|
||||
if line.startswith("scons: "):
|
||||
line = line[7:]
|
||||
@@ -308,6 +419,27 @@ class PlatformRunMixin(object):
|
||||
fg = "green"
|
||||
click.secho(line, fg=fg, err=level > 1)
|
||||
|
||||
@staticmethod
|
||||
def _echo_missed_dependency(filename):
|
||||
if "/" in filename or not filename.endswith((".h", ".hpp")):
|
||||
return
|
||||
banner = """
|
||||
{dots}
|
||||
* Looking for {filename_styled} dependency? Check our library registry!
|
||||
*
|
||||
* CLI > platformio lib search "header:{filename}"
|
||||
* Web > {link}
|
||||
*
|
||||
{dots}
|
||||
""".format(filename=filename,
|
||||
filename_styled=click.style(filename, fg="cyan"),
|
||||
link=click.style(
|
||||
"https://platformio.org/lib/search?query=header:%s" % quote(
|
||||
filename, safe=""),
|
||||
fg="blue"),
|
||||
dots="*" * (56 + len(filename)))
|
||||
click.echo(banner, err=True)
|
||||
|
||||
@staticmethod
|
||||
def get_job_nums():
|
||||
try:
|
||||
@@ -316,8 +448,10 @@ class PlatformRunMixin(object):
|
||||
return 1
|
||||
|
||||
|
||||
class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
|
||||
class PlatformBase( # pylint: disable=too-many-public-methods
|
||||
PlatformPackagesMixin, PlatformRunMixin):
|
||||
|
||||
PIO_VERSION = semantic_version.Version(util.pepver_to_semver(__version__))
|
||||
_BOARDS_CACHE = {}
|
||||
|
||||
def __init__(self, manifest_path):
|
||||
@@ -332,6 +466,12 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
|
||||
self.silent = False
|
||||
self.verbose = False
|
||||
|
||||
if self.engines and "platformio" in self.engines:
|
||||
if self.PIO_VERSION not in semantic_version.Spec(
|
||||
self.engines['platformio']):
|
||||
raise exception.IncompatiblePlatform(self.name,
|
||||
str(self.PIO_VERSION))
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._manifest['name']
|
||||
@@ -356,6 +496,14 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
|
||||
def vendor_url(self):
|
||||
return self._manifest.get("url")
|
||||
|
||||
@property
|
||||
def docs_url(self):
|
||||
return self._manifest.get("docs")
|
||||
|
||||
@property
|
||||
def repository_url(self):
|
||||
return self._manifest.get("repository", {}).get("url")
|
||||
|
||||
@property
|
||||
def license(self):
|
||||
return self._manifest.get("license")
|
||||
@@ -364,6 +512,10 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
|
||||
def frameworks(self):
|
||||
return self._manifest.get("frameworks")
|
||||
|
||||
@property
|
||||
def engines(self):
|
||||
return self._manifest.get("engines")
|
||||
|
||||
@property
|
||||
def manifest(self):
|
||||
return self._manifest
|
||||
@@ -395,8 +547,8 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
|
||||
config = PlatformBoardConfig(manifest_path)
|
||||
if "platform" in config and config.get("platform") != self.name:
|
||||
return
|
||||
elif ("platforms" in config and
|
||||
self.name not in config.get("platforms")):
|
||||
elif "platforms" in config \
|
||||
and self.name not in config.get("platforms"):
|
||||
return
|
||||
config.manifest['platform'] = self.name
|
||||
self._BOARDS_CACHE[board_id] = config
|
||||
@@ -422,9 +574,9 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
|
||||
if not isdir(boards_dir):
|
||||
continue
|
||||
manifest_path = join(boards_dir, "%s.json" % id_)
|
||||
if not isfile(manifest_path):
|
||||
continue
|
||||
_append_board(id_, manifest_path)
|
||||
if isfile(manifest_path):
|
||||
_append_board(id_, manifest_path)
|
||||
break
|
||||
if id_ not in self._BOARDS_CACHE:
|
||||
raise exception.UnknownBoard(id_)
|
||||
return self._BOARDS_CACHE[id_] if id_ else self._BOARDS_CACHE
|
||||
@@ -435,20 +587,6 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
|
||||
def get_package_type(self, name):
|
||||
return self.packages[name].get("type")
|
||||
|
||||
def pkg_types_to_names(self, types):
|
||||
names = []
|
||||
for type_ in types:
|
||||
name = type_
|
||||
# lookup by package types
|
||||
for _name, _opts in self.packages.items():
|
||||
if _opts.get("type") == type_:
|
||||
name = None
|
||||
names.append(_name)
|
||||
# if type is the right name
|
||||
if name:
|
||||
names.append(name)
|
||||
return names
|
||||
|
||||
    def configure_default_packages(self, variables, targets):
        # enable used frameworks
        frameworks = variables.get("pioframework", [])
@@ -460,28 +598,45 @@ class PlatformBase(PlatformPackagesMixin, PlatformRunMixin):
            framework = framework.lower().strip()
            if not framework or framework not in self.frameworks:
                continue
            _pkg_name = self.frameworks[framework]['package']
            self.packages[_pkg_name]['optional'] = False
            _pkg_name = self.frameworks[framework].get("package")
            if _pkg_name:
                self.packages[_pkg_name]['optional'] = False

        # enable upload tools for upload targets
        if any(["upload" in t for t in targets] + ["program" in targets]):
            for _name, _opts in self.packages.iteritems():
                if _opts.get("type") == "uploader":
                    self.packages[_name]['optional'] = False
                elif "nobuild" in targets:
                    # skip all packages, allow only upload tools
                    self.packages[_name]['optional'] = True
            for name, opts in self.packages.iteritems():
                if opts.get("type") == "uploader":
                    self.packages[name]['optional'] = False
                # skip all packages in "nobuild" mode
                # allow only upload tools and frameworks
                elif "nobuild" in targets and opts.get("type") != "framework":
                    self.packages[name]['optional'] = True

        if "__test" in targets and "tool-unity" not in self.packages:
            self.packages['tool-unity'] = {
                "version": "~1.20302.1",
                "optional": False
            }
        if "tool-scons" not in self.packages:
            self.packages['tool-scons'] = {
                "version": "~3.20501.2",
                "optional": False
            }
    def get_lib_storages(self):
        storages = []
        for opts in (self.frameworks or {}).values():
            if "package" not in opts:
                continue
            pkg_dir = self.get_package_dir(opts['package'])
            if not pkg_dir or not isdir(join(pkg_dir, "libraries")):
                continue
            libs_dir = join(pkg_dir, "libraries")
            storages.append({"name": opts['package'], "path": libs_dir})
            libcores_dir = join(libs_dir, "__cores__")
            if not isdir(libcores_dir):
                continue
            for item in os.listdir(libcores_dir):
                libcore_dir = join(libcores_dir, item)
                if not isdir(libcore_dir):
                    continue
                storages.append({
                    "name":
                    "%s-core-%s" % (opts['package'], item),
                    "path":
                    libcore_dir
                })

        return storages


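get_lib_storages collects framework-bundled library folders (plus any `__cores__` sub-storages) as name/path pairs. A small sketch of how the result might be consumed, again assuming an initialized `PlatformBase` instance `platform`; the package names in the output are illustrative:

```python
for storage in platform.get_lib_storages():
    # Each entry is a dict such as
    # {"name": "framework-arduinoavr", "path": "/path/to/libraries"}
    print("%s -> %s" % (storage["name"], storage["path"]))
```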
class PlatformBoardConfig(object):
@@ -511,6 +666,15 @@ class PlatformBoardConfig(object):
        else:
            raise KeyError("Invalid board option '%s'" % path)

    def update(self, path, value):
        newdict = None
        for key in path.split(".")[::-1]:
            if newdict is None:
                newdict = {key: value}
            else:
                newdict = {key: newdict}
        util.merge_dicts(self._manifest, newdict)

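The update() helper above turns a dotted option path into a nested dict and merges it into the board manifest via util.merge_dicts. A minimal sketch of the effect, assuming a board config object `config`; the manifest values are illustrative:

```python
# Before: config._manifest == {"build": {"mcu": "atmega328p"}}
config.update("build.f_cpu", "16000000L")
# After the merge, existing and new nested keys coexist:
# {"build": {"mcu": "atmega328p", "f_cpu": "16000000L"}}
```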
    def __contains__(self, key):
        try:
            self.get(key)
@@ -532,15 +696,73 @@ class PlatformBoardConfig(object):

    def get_brief_data(self):
        return {
            "id": self.id,
            "name": self._manifest['name'],
            "platform": self._manifest.get("platform"),
            "mcu": self._manifest.get("build", {}).get("mcu", "").upper(),
            "id":
            self.id,
            "name":
            self._manifest['name'],
            "platform":
            self._manifest.get("platform"),
            "mcu":
            self._manifest.get("build", {}).get("mcu", "").upper(),
            "fcpu":
            int(self._manifest.get("build", {}).get("f_cpu", "0L")[:-1]),
            "ram": self._manifest.get("upload", {}).get("maximum_ram_size", 0),
            "rom": self._manifest.get("upload", {}).get("maximum_size", 0),
            "frameworks": self._manifest.get("frameworks"),
            "vendor": self._manifest['vendor'],
            "url": self._manifest['url']
            int("".join([
                c for c in str(
                    self._manifest.get("build", {}).get("f_cpu", "0L"))
                if c.isdigit()
            ])),
            "ram":
            self._manifest.get("upload", {}).get("maximum_ram_size", 0),
            "rom":
            self._manifest.get("upload", {}).get("maximum_size", 0),
            "connectivity":
            self._manifest.get("connectivity"),
            "frameworks":
            self._manifest.get("frameworks"),
            "debug":
            self.get_debug_data(),
            "vendor":
            self._manifest['vendor'],
            "url":
            self._manifest['url']
        }

    def get_debug_data(self):
        if not self._manifest.get("debug", {}).get("tools"):
            return None
        tools = {}
        for name, options in self._manifest['debug']['tools'].items():
            tools[name] = {}
            for key, value in options.items():
                if key in ("default", "onboard"):
                    tools[name][key] = value
        return {"tools": tools}

    def get_debug_tool_name(self, custom=None):
        debug_tools = self._manifest.get("debug", {}).get("tools")
        tool_name = custom
        if tool_name == "custom":
            return tool_name
        if not debug_tools:
            raise exception.DebugSupportError(self._manifest['name'])
        if tool_name:
            if tool_name in debug_tools:
                return tool_name
            raise exception.DebugInvalidOptions(
                "Unknown debug tool `%s`. Please use one of `%s` or `custom`" %
                (tool_name, ", ".join(sorted(debug_tools.keys()))))

        # automatically select best tool
        data = {"default": [], "onboard": [], "external": []}
        for key, value in debug_tools.items():
            if value.get("default"):
                data['default'].append(key)
            elif value.get("onboard"):
                data['onboard'].append(key)
            data['external'].append(key)

        for key, value in data.items():
            if not value:
                continue
            return sorted(value)[0]

        assert any(item for item in data)

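get_debug_tool_name either validates an explicitly requested tool or auto-selects one by grouping the board's debug tools into "default", "onboard" and "external" buckets (ties within a bucket break alphabetically via sorted()). A rough illustration of the grouping only; the manifest data is made up:

```python
# Hypothetical board debug manifest, for illustration only.
debug_tools = {
    "stlink": {"onboard": True},
    "jlink": {},
    "blackmagic": {"default": True},
}
# The grouping built by the method above would contain:
#   default  -> ["blackmagic"]
#   onboard  -> ["stlink"]
#   external -> every tool ("stlink", "jlink", "blackmagic")
```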
@@ -1,94 +0,0 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from os.path import join
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.managers.package import PackageManager
|
||||
|
||||
PACKAGE_DEPS = {
|
||||
"pysite": {
|
||||
"name": "pysite-pioplus",
|
||||
"requirements": ">=0.3.0,<2"
|
||||
},
|
||||
"tool": {
|
||||
"name": "tool-pioplus",
|
||||
"requirements": ">=0.6.6,<2"
|
||||
}
|
||||
}
|
||||
|
||||
AUTO_UPDATES_MAX = 100
|
||||
|
||||
|
||||
class PioPlusPackageManager(PackageManager):
|
||||
|
||||
def __init__(self):
|
||||
PackageManager.__init__(
|
||||
self,
|
||||
join(util.get_home_dir(), "packages"), [
|
||||
"https://dl.bintray.com/platformio/dl-packages/manifest.json",
|
||||
"http%s://dl.platformio.org/packages/manifest.json" %
|
||||
("" if sys.version_info < (2, 7, 9) else "s")
|
||||
])
|
||||
|
||||
|
||||
def pioplus_install():
|
||||
pm = PioPlusPackageManager()
|
||||
for item in PACKAGE_DEPS.values():
|
||||
pm.install(item['name'], item['requirements'], silent=True)
|
||||
|
||||
|
||||
def pioplus_update():
|
||||
pm = PioPlusPackageManager()
|
||||
for item in PACKAGE_DEPS.values():
|
||||
package_dir = pm.get_package_dir(item['name'])
|
||||
if package_dir:
|
||||
pm.update(item['name'], item['requirements'])
|
||||
|
||||
|
||||
def pioplus_call(args, **kwargs):
|
||||
pioplus_install()
|
||||
pm = PioPlusPackageManager()
|
||||
pioplus_path = join(
|
||||
pm.get_package_dir(PACKAGE_DEPS['tool']['name'],
|
||||
PACKAGE_DEPS['tool']['requirements']), "pioplus")
|
||||
os.environ['PYTHONEXEPATH'] = util.get_pythonexe_path()
|
||||
os.environ['PYTHONPYSITEDIR'] = pm.get_package_dir(
|
||||
PACKAGE_DEPS['pysite']['name'], PACKAGE_DEPS['pysite']['requirements'])
|
||||
util.copy_pythonpath_to_osenv()
|
||||
code = subprocess.call([pioplus_path] + args, **kwargs)
|
||||
|
||||
# handle remote update request
|
||||
if code == 13:
|
||||
count_attr = "_update_count"
|
||||
try:
|
||||
count_value = getattr(pioplus_call, count_attr)
|
||||
except AttributeError:
|
||||
count_value = 0
|
||||
setattr(pioplus_call, count_attr, 1)
|
||||
count_value += 1
|
||||
setattr(pioplus_call, count_attr, count_value)
|
||||
if count_value < AUTO_UPDATES_MAX:
|
||||
pioplus_update()
|
||||
return pioplus_call(args, **kwargs)
|
||||
|
||||
# handle reload request
|
||||
elif code == 14:
|
||||
return pioplus_call(args, **kwargs)
|
||||
|
||||
if code != 0:
|
||||
raise exception.ReturnErrorCode(1)
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -15,10 +15,12 @@
|
||||
import atexit
|
||||
import platform
|
||||
import Queue
|
||||
import re
|
||||
import sys
|
||||
import threading
|
||||
from collections import deque
|
||||
from os import getenv
|
||||
from os import getenv, sep
|
||||
from os.path import join
|
||||
from time import sleep, time
|
||||
from traceback import format_exc
|
||||
|
||||
@@ -92,12 +94,26 @@ class MeasurementProtocol(TelemetryBase):
        self['an'] = " ".join(dpdata)

    def _prefill_custom_data(self):

        def _filter_args(items):
            result = []
            stop = False
            for item in items:
                item = str(item).lower()
                result.append(item)
                if stop:
                    break
                if item == "account":
                    stop = True
            return result

        caller_id = str(app.get_session_var("caller_id"))
        self['cd1'] = util.get_systype()
        self['cd2'] = "Python/%s %s" % (platform.python_version(),
                                        platform.platform())
        self['cd4'] = 1 if (not util.is_ci() and
                            (caller_id or not util.is_container())) else 0
        # self['cd3'] = " ".join(_filter_args(sys.argv[1:]))
        self['cd4'] = 1 if (not util.is_ci()
                            and (caller_id or not util.is_container())) else 0
        if caller_id:
            self['cd5'] = caller_id.lower()

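The nested _filter_args helper is meant to lower-case the command-line arguments and stop collecting one item after "account", so account credentials would not end up in the telemetry payload. A quick illustration, if the helper were called directly (the argv tail is invented):

```python
print(_filter_args(["account", "login", "--username", "me@example.com"]))
# -> ["account", "login"]   (everything after the sub-command is dropped)
```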
@@ -109,7 +125,6 @@ class MeasurementProtocol(TelemetryBase):
|
||||
return _arg
|
||||
return None
|
||||
|
||||
self['cd3'] = " ".join([str(s).lower() for s in sys.argv[1:]])
|
||||
if not app.get_session_var("command_ctx"):
|
||||
return
|
||||
ctx_args = app.get_session_var("command_ctx").args
|
||||
@@ -121,8 +136,8 @@ class MeasurementProtocol(TelemetryBase):
|
||||
"settings", "account"):
|
||||
cmd_path = args[:2]
|
||||
if args[0] == "lib" and len(args) > 1:
|
||||
lib_subcmds = ("install", "list", "register", "search", "show",
|
||||
"uninstall", "update")
|
||||
lib_subcmds = ("builtin", "install", "list", "register", "search",
|
||||
"show", "stats", "uninstall", "update")
|
||||
sub_cmd = _first_arg_from_list(args[1:], lib_subcmds)
|
||||
if sub_cmd:
|
||||
cmd_path.append(sub_cmd)
|
||||
@@ -138,16 +153,22 @@ class MeasurementProtocol(TelemetryBase):
|
||||
cmd_path.append(sub_cmd)
|
||||
self['screen_name'] = " ".join([p.title() for p in cmd_path])
|
||||
|
||||
def send(self, hittype):
|
||||
@staticmethod
|
||||
def _ignore_hit():
|
||||
if not app.get_setting("enable_telemetry"):
|
||||
return True
|
||||
if app.get_session_var("caller_id") and \
|
||||
all(c in sys.argv for c in ("run", "idedata")):
|
||||
return True
|
||||
return False
|
||||
|
||||
def send(self, hittype):
|
||||
if self._ignore_hit():
|
||||
return
|
||||
|
||||
self['t'] = hittype
|
||||
|
||||
# correct queue time
|
||||
if "qt" in self._params and isinstance(self['qt'], float):
|
||||
self['qt'] = int((time() - self['qt']) * 1000)
|
||||
|
||||
MPDataPusher().push(self._params)
|
||||
|
||||
|
||||
@@ -227,8 +248,13 @@ class MPDataPusher(object):
|
||||
timeout=1)
|
||||
r.raise_for_status()
|
||||
return True
|
||||
except requests.exceptions.HTTPError as e:
|
||||
# skip Bad Request
|
||||
if 400 >= e.response.status_code < 500:
|
||||
return True
|
||||
except: # pylint: disable=W0702
|
||||
self._http_offline = True
|
||||
pass
|
||||
self._http_offline = True
|
||||
return False
|
||||
|
||||
|
||||
@@ -250,8 +276,9 @@ def measure_ci():
|
||||
"label": getenv("APPVEYOR_REPO_NAME")
|
||||
},
|
||||
"CIRCLECI": {
|
||||
"label": "%s/%s" % (getenv("CIRCLE_PROJECT_USERNAME"),
|
||||
getenv("CIRCLE_PROJECT_REPONAME"))
|
||||
"label":
|
||||
"%s/%s" % (getenv("CIRCLE_PROJECT_USERNAME"),
|
||||
getenv("CIRCLE_PROJECT_REPONAME"))
|
||||
},
|
||||
"TRAVIS": {
|
||||
"label": getenv("TRAVIS_REPO_SLUG")
|
||||
@@ -273,7 +300,10 @@ def measure_ci():
|
||||
|
||||
|
||||
def on_run_environment(options, targets):
|
||||
opts = ["%s=%s" % (opt, value) for opt, value in sorted(options.items())]
|
||||
opts = [
|
||||
"%s=%s" % (opt, value.replace("\n", ", ") if "\n" in value else value)
|
||||
for opt, value in sorted(options.items())
|
||||
]
|
||||
targets = [t.title() for t in targets or ["run"]]
|
||||
on_event("Env", " ".join(targets), "&".join(opts))
|
||||
|
||||
@@ -292,19 +322,38 @@ def on_event(category, action, label=None, value=None, screen_name=None):


def on_exception(e):
    skip = any([

    def _cleanup_description(text):
        text = text.replace("Traceback (most recent call last):", "")
        text = re.sub(
            r'File "([^"]+)"',
            lambda m: join(*m.group(1).split(sep)[-2:]),
            text,
            flags=re.M)
        text = re.sub(r"\s+", " ", text, flags=re.M)
        return text.strip()

    skip_conditions = [
        isinstance(e, cls)
        for cls in (IOError, exception.AbortedByUser,
                    exception.NotGlobalLibDir, exception.InternetIsOffline)
    ])
    if skip:
        for cls in (IOError, exception.ReturnErrorCode,
                    exception.AbortedByUser, exception.NotGlobalLibDir,
                    exception.InternetIsOffline,
                    exception.NotPlatformIOProject,
                    exception.UserSideException)
    ]
    try:
        skip_conditions.append("[API] Account: " in str(e))
    except UnicodeEncodeError as ue:
        e = ue
    if any(skip_conditions):
        return
    is_crash = any([
        not isinstance(e, exception.PlatformioException),
        "Error" in e.__class__.__name__
    ])
    mp = MeasurementProtocol()
    mp['exd'] = "%s: %s" % (type(e).__name__, format_exc() if is_crash else e)
    description = _cleanup_description(format_exc() if is_crash else str(e))
    mp['exd'] = ("%s: %s" % (type(e).__name__, description))[:2048]
    mp['exf'] = 1 if is_crash else 0
    mp.send("exception")

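The _cleanup_description helper strips the "Traceback (most recent call last):" banner, shortens every `File "..."` path to its last two components, and collapses whitespace before the report is truncated to 2048 characters. A rough illustration; the traceback text and paths are invented and the resulting string is only approximate:

```python
raw = '''Traceback (most recent call last):
  File "/home/user/penv/lib/site-packages/platformio/util.py", line 42, in load_json
    return json.load(f)
ValueError: No JSON object could be decoded'''

# After cleanup the reported description would look roughly like:
# 'platformio/util.py, line 42, in load_json return json.load(f)
#  ValueError: No JSON object could be decoded'
```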
@@ -365,3 +414,4 @@ def resend_backuped_reports():
|
||||
# clean
|
||||
tm['backup'] = []
|
||||
app.set_state_item("telemetry", tm)
|
||||
return True
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -13,15 +13,14 @@
|
||||
# limitations under the License.
|
||||
|
||||
from os import chmod
|
||||
from os.path import join, splitext
|
||||
from os.path import exists, islink, join
|
||||
from tarfile import open as tarfile_open
|
||||
from time import mktime
|
||||
from zipfile import ZipFile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, util
|
||||
from platformio.exception import UnsupportedArchiveType
|
||||
from platformio import exception, util
|
||||
|
||||
|
||||
class ArchiveBase(object):
|
||||
@@ -32,6 +31,9 @@ class ArchiveBase(object):
|
||||
def get_items(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
raise NotImplementedError()
|
||||
|
||||
def extract_item(self, item, dest_dir):
|
||||
self._afo.extract(item, dest_dir)
|
||||
self.after_extract(item, dest_dir)
|
||||
@@ -39,6 +41,9 @@ class ArchiveBase(object):
|
||||
def after_extract(self, item, dest_dir):
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
self._afo.close()
|
||||
|
||||
|
||||
class TARArchive(ArchiveBase):
|
||||
|
||||
@@ -48,6 +53,9 @@ class TARArchive(ArchiveBase):
|
||||
def get_items(self):
|
||||
return self._afo.getmembers()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
return item.name
|
||||
|
||||
|
||||
class ZIPArchive(ArchiveBase):
|
||||
|
||||
@@ -69,6 +77,9 @@ class ZIPArchive(ArchiveBase):
|
||||
def get_items(self):
|
||||
return self._afo.infolist()
|
||||
|
||||
def get_item_filename(self, item):
|
||||
return item.filename
|
||||
|
||||
def after_extract(self, item, dest_dir):
|
||||
self.preserve_permissions(item, dest_dir)
|
||||
self.preserve_mtime(item, dest_dir)
|
||||
@@ -76,28 +87,40 @@ class ZIPArchive(ArchiveBase):

class FileUnpacker(object):

    def __init__(self, archpath, dest_dir="."):
        self._archpath = archpath
        self._dest_dir = dest_dir
    def __init__(self, archpath):
        self.archpath = archpath
        self._unpacker = None

        _, archext = splitext(archpath.lower())
        if archext in (".gz", ".bz2"):
            self._unpacker = TARArchive(archpath)
        elif archext == ".zip":
            self._unpacker = ZIPArchive(archpath)

    def __enter__(self):
        if self.archpath.lower().endswith((".gz", ".bz2")):
            self._unpacker = TARArchive(self.archpath)
        elif self.archpath.lower().endswith(".zip"):
            self._unpacker = ZIPArchive(self.archpath)
        if not self._unpacker:
            raise UnsupportedArchiveType(archpath)
            raise exception.UnsupportedArchiveType(self.archpath)
        return self

    def start(self):
        if app.is_disabled_progressbar():
    def __exit__(self, *args):
        if self._unpacker:
            self._unpacker.close()

    def unpack(self, dest_dir=".", with_progress=True):
        assert self._unpacker
        if not with_progress:
            click.echo("Unpacking...")
            for item in self._unpacker.get_items():
                self._unpacker.extract_item(item, self._dest_dir)
                self._unpacker.extract_item(item, dest_dir)
        else:
            items = self._unpacker.get_items()
            with click.progressbar(items, label="Unpacking") as pb:
                for item in pb:
                    self._unpacker.extract_item(item, self._dest_dir)
                    self._unpacker.extract_item(item, dest_dir)

        # check on disk
        for item in self._unpacker.get_items():
            filename = self._unpacker.get_item_filename(item)
            item_path = join(dest_dir, filename)
            if not islink(item_path) and not exists(item_path):
                raise exception.ExtractArchiveItemError(filename, dest_dir)

        return True

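After this change FileUnpacker is a context manager: the archive type is detected in __enter__, the handle is closed in __exit__, and unpack() verifies every extracted item on disk. A minimal usage sketch, assuming the class keeps its platformio.unpacker location; the archive path and destination are illustrative:

```python
from platformio.unpacker import FileUnpacker

with FileUnpacker("/tmp/framework-arduinoavr.tar.gz") as fu:
    # Raises UnsupportedArchiveType for unknown extensions and
    # ExtractArchiveItemError if an extracted file is missing on disk.
    fu.unpack(dest_dir="/tmp/framework-arduinoavr", with_progress=False)
```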
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import collections
|
||||
import functools
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
@@ -22,12 +20,13 @@ import socket
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from functools import wraps
|
||||
from glob import glob
|
||||
from os.path import (abspath, basename, dirname, expanduser, isdir, isfile,
|
||||
join, normpath, splitdrive)
|
||||
from shutil import rmtree
|
||||
from threading import Thread
|
||||
from time import sleep
|
||||
|
||||
import click
|
||||
import requests
|
||||
@@ -37,30 +36,40 @@ from platformio import __apiurl__, __version__, exception
|
||||
# pylint: disable=wrong-import-order, too-many-ancestors
|
||||
|
||||
try:
|
||||
from configparser import ConfigParser
|
||||
import configparser as ConfigParser
|
||||
except ImportError:
|
||||
from ConfigParser import ConfigParser
|
||||
import ConfigParser as ConfigParser
|
||||
|
||||
|
||||
class ProjectConfig(ConfigParser):
class ProjectConfig(ConfigParser.ConfigParser):

    VARTPL_RE = re.compile(r"\$\{([^\.\}]+)\.([^\}]+)\}")

    def items(self, section, **_):
    def items(self, section, **_): # pylint: disable=arguments-differ
        items = []
        for option in ConfigParser.options(self, section):
        for option in ConfigParser.ConfigParser.options(self, section):
            items.append((option, self.get(section, option)))
        return items

    def get(self, section, option, **kwargs):
        value = ConfigParser.get(self, section, option, **kwargs)
        try:
            value = ConfigParser.ConfigParser.get(self, section, option,
                                                  **kwargs)
        except ConfigParser.Error as e:
            raise exception.InvalidProjectConf(str(e))
        if "${" not in value or "}" not in value:
            return value
        return self.VARTPL_RE.sub(self._re_sub_handler, value)

    def _re_sub_handler(self, match):
        section, option = match.group(1), match.group(2)
        if section == "env" and not self.has_section(section):
        if section in ("env", "sysenv") and not self.has_section(section):
            if section == "env":
                click.secho(
                    "Warning! Access to system environment variable via "
                    "`${{env.{0}}}` is deprecated. Please use "
                    "`${{sysenv.{0}}}` instead".format(option),
                    fg="yellow")
            return os.getenv(option)
        return self.get(section, option)

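ProjectConfig.get expands `${section.option}` templates, and `${sysenv.NAME}` (or the deprecated `${env.NAME}`) now falls back to the process environment. A small sketch of the behaviour; the platformio.ini contents in the comments are illustrative only:

```python
import os

os.environ["HOME_UPLOAD_PORT"] = "/dev/ttyUSB0"

config = ProjectConfig()
config.read("platformio.ini")
# Given, for example:
#   [common]    build_flags = -DDEBUG
#   [env:uno]   build_flags = ${common.build_flags} -Wall
#               upload_port = ${sysenv.HOME_UPLOAD_PORT}
print(config.get("env:uno", "build_flags"))  # -> "-DDEBUG -Wall"
print(config.get("env:uno", "upload_port"))  # -> "/dev/ttyUSB0"
```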
@@ -112,36 +121,42 @@ class cd(object):
|
||||
|
||||
|
||||
class memoized(object):
    '''
    Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).
    https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
    '''

    def __init__(self, func):
        self.func = func
    def __init__(self, expire=0):
        self.expire = expire / 1000 # milliseconds
        self.cache = {}

    def __call__(self, *args):
        if not isinstance(args, collections.Hashable):
            # uncacheable. a list, for instance.
            # better to not cache than blow up.
            return self.func(*args)
        if args in self.cache:
            return self.cache[args]
        else:
            value = self.func(*args)
            self.cache[args] = value
            return value
    def __call__(self, func):

    def __repr__(self):
        '''Return the function's docstring.'''
        return self.func.__doc__
        @wraps(func)
        def wrapper(*args, **kwargs):
            key = str(args) + str(kwargs)
            if (key not in self.cache
                    or (self.expire > 0
                        and self.cache[key][0] < time.time() - self.expire)):
                self.cache[key] = (time.time(), func(*args, **kwargs))
            return self.cache[key][1]

    def __get__(self, obj, objtype):
        '''Support instance methods.'''
        return functools.partial(self.__call__, obj)
        return wrapper


class throttle(object):

    def __init__(self, threshhold):
        self.threshhold = threshhold # milliseconds
        self.last = 0

    def __call__(self, func):

        @wraps(func)
        def wrapper(*args, **kwargs):
            diff = int(round((time.time() - self.last) * 1000))
            if diff < self.threshhold:
                time.sleep((self.threshhold - diff) * 0.001)
            self.last = time.time()
            return func(*args, **kwargs)

        return wrapper

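After this change memoized takes an expiry in milliseconds and throttle enforces a minimum delay between calls; both are applied further down to the API request helpers. A minimal usage sketch; the decorated function names and bodies are illustrative and assume `requests` is available:

```python
@memoized(expire=5000)          # cache results for up to 5 seconds
def fetch_manifest(url):
    return requests.get(url).json()


@throttle(500)                  # at most one call every 500 ms
def report_progress(percent):
    print("%d%%" % percent)
```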
def singleton(cls):
|
||||
@@ -156,9 +171,16 @@ def singleton(cls):
|
||||
return get_instance
|
||||
|
||||
|
||||
def path_to_unicode(path):
|
||||
return path.decode(sys.getfilesystemencoding()).encode("utf-8")
|
||||
|
||||
|
||||
def load_json(file_path):
|
||||
with open(file_path, "r") as f:
|
||||
return json.load(f)
|
||||
try:
|
||||
with open(file_path, "r") as f:
|
||||
return json.load(f)
|
||||
except ValueError:
|
||||
raise exception.InvalidJSONFile(file_path)
|
||||
|
||||
|
||||
def get_systype():
|
||||
@@ -183,8 +205,8 @@ def get_project_optional_dir(name, default=None):
|
||||
else:
|
||||
try:
|
||||
config = load_project_config()
|
||||
if (config.has_section("platformio") and
|
||||
config.has_option("platformio", name)):
|
||||
if (config.has_section("platformio")
|
||||
and config.has_option("platformio", name)):
|
||||
data = config.get("platformio", name)
|
||||
except exception.NotPlatformIOProject:
|
||||
pass
|
||||
@@ -203,20 +225,29 @@ def get_project_optional_dir(name, default=None):
|
||||
def get_home_dir():
|
||||
home_dir = get_project_optional_dir("home_dir",
|
||||
join(expanduser("~"), ".platformio"))
|
||||
|
||||
win_home_dir = None
|
||||
if "windows" in get_systype():
|
||||
try:
|
||||
home_dir.encode("utf8")
|
||||
except UnicodeDecodeError:
|
||||
home_dir = splitdrive(home_dir)[0] + "\\.platformio"
|
||||
win_home_dir = splitdrive(home_dir)[0] + "\\.platformio"
|
||||
if isdir(win_home_dir):
|
||||
home_dir = win_home_dir
|
||||
|
||||
if not isdir(home_dir):
|
||||
os.makedirs(home_dir)
|
||||
try:
|
||||
os.makedirs(home_dir)
|
||||
except: # pylint: disable=bare-except
|
||||
if win_home_dir:
|
||||
os.makedirs(win_home_dir)
|
||||
home_dir = win_home_dir
|
||||
|
||||
assert isdir(home_dir)
|
||||
return home_dir
|
||||
|
||||
|
||||
def get_cache_dir():
|
||||
return get_project_optional_dir("cache_dir", join(get_home_dir(),
|
||||
".cache"))
|
||||
|
||||
|
||||
def get_source_dir():
|
||||
curpath = abspath(__file__)
|
||||
if not isfile(curpath):
|
||||
@@ -260,9 +291,14 @@ def get_projectsrc_dir():
|
||||
return get_project_optional_dir("src_dir", join(get_project_dir(), "src"))
|
||||
|
||||
|
||||
def get_projectinclude_dir():
|
||||
return get_project_optional_dir("include_dir",
|
||||
join(get_project_dir(), "include"))
|
||||
|
||||
|
||||
def get_projecttest_dir():
|
||||
return get_project_optional_dir("test_dir",
|
||||
join(get_project_dir(), "test"))
|
||||
return get_project_optional_dir("test_dir", join(get_project_dir(),
|
||||
"test"))
|
||||
|
||||
|
||||
def get_projectboards_dir():
|
||||
@@ -270,8 +306,8 @@ def get_projectboards_dir():
|
||||
join(get_project_dir(), "boards"))
|
||||
|
||||
|
||||
def get_projectpioenvs_dir(force=False):
|
||||
path = get_project_optional_dir("envs_dir",
|
||||
def get_projectbuild_dir(force=False):
|
||||
path = get_project_optional_dir("build_dir",
|
||||
join(get_project_dir(), ".pioenvs"))
|
||||
try:
|
||||
if not isdir(path):
|
||||
@@ -281,7 +317,7 @@ def get_projectpioenvs_dir(force=False):
|
||||
with open(dontmod_path, "w") as fp:
|
||||
fp.write("""
|
||||
[InternetShortcut]
|
||||
URL=http://docs.platformio.org/page/projectconf.html#envs-dir
|
||||
URL=http://docs.platformio.org/page/projectconf/section_platformio.html#build-dir
|
||||
""")
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
if not force:
|
||||
@@ -289,25 +325,40 @@ URL=http://docs.platformio.org/page/projectconf.html#envs-dir
|
||||
return path
|
||||
|
||||
|
||||
# compatibility with PIO Core+
|
||||
get_projectpioenvs_dir = get_projectbuild_dir
|
||||
|
||||
|
||||
def get_projectdata_dir():
|
||||
return get_project_optional_dir("data_dir",
|
||||
join(get_project_dir(), "data"))
|
||||
return get_project_optional_dir("data_dir", join(get_project_dir(),
|
||||
"data"))
|
||||
|
||||
|
||||
def load_project_config(path=None):
|
||||
if not path or isdir(path):
|
||||
project_dir = path or get_project_dir()
|
||||
if not is_platformio_project(project_dir):
|
||||
raise exception.NotPlatformIOProject(project_dir)
|
||||
path = join(project_dir, "platformio.ini")
|
||||
assert isfile(path)
|
||||
path = join(path or get_project_dir(), "platformio.ini")
|
||||
if not isfile(path):
|
||||
raise exception.NotPlatformIOProject(
|
||||
dirname(path) if path.endswith("platformio.ini") else path)
|
||||
cp = ProjectConfig()
|
||||
cp.read(path)
|
||||
try:
|
||||
cp.read(path)
|
||||
except ConfigParser.Error as e:
|
||||
raise exception.InvalidProjectConf(str(e))
|
||||
return cp
|
||||
|
||||
|
||||
def change_filemtime(path, time):
    os.utime(path, (time, time))
def parse_conf_multi_values(items):
    if not items:
        return []
    return [
        item.strip() for item in items.split("\n" if "\n" in items else ", ")
        if item.strip()
    ]


def change_filemtime(path, mtime):
    os.utime(path, (mtime, mtime))


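parse_conf_multi_values splits a multi-value option either on newlines or on ", " and drops empty entries, which is how list-style options from platformio.ini are normalized. For example:

```python
print(parse_conf_multi_values("ota, serial"))
# -> ["ota", "serial"]

print(parse_conf_multi_values("-DDEBUG\n-Wall\n"))
# -> ["-DDEBUG", "-Wall"]
```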
def is_ci():
|
||||
@@ -368,7 +419,7 @@ def copy_pythonpath_to_osenv():
|
||||
os.environ['PYTHONPATH'] = os.pathsep.join(_PYTHONPATH)
|
||||
|
||||
|
||||
def get_serialports(filter_hwid=False):
|
||||
def get_serial_ports(filter_hwid=False):
|
||||
try:
|
||||
from serial.tools.list_ports import comports
|
||||
except ImportError:
|
||||
@@ -378,7 +429,7 @@ def get_serialports(filter_hwid=False):
|
||||
for p, d, h in comports():
|
||||
if not p:
|
||||
continue
|
||||
if platform.system() == "Windows":
|
||||
if "windows" in get_systype():
|
||||
try:
|
||||
d = unicode(d, errors="ignore")
|
||||
except TypeError:
|
||||
@@ -390,35 +441,127 @@ def get_serialports(filter_hwid=False):
|
||||
return result
|
||||
|
||||
# fix for PySerial
|
||||
if not result and platform.system() == "Darwin":
|
||||
if not result and "darwin" in get_systype():
|
||||
for p in glob("/dev/tty.*"):
|
||||
result.append({"port": p, "description": "n/a", "hwid": "n/a"})
|
||||
return result
|
||||
|
||||
|
||||
def get_logicaldisks():
|
||||
disks = []
|
||||
if platform.system() == "Windows":
|
||||
result = exec_command(
|
||||
["wmic", "logicaldisk", "get", "name,VolumeName"]).get("out", "")
|
||||
disknamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
|
||||
for line in result.split("\n"):
|
||||
match = disknamere.match(line.strip())
|
||||
if not match:
|
||||
continue
|
||||
disks.append({"disk": match.group(1), "name": match.group(2)})
|
||||
# Backward compatibility for PIO Core <3.5
|
||||
get_serialports = get_serial_ports
|
||||
|
||||
|
||||
def get_logical_devices():
|
||||
items = []
|
||||
if "windows" in get_systype():
|
||||
try:
|
||||
result = exec_command(
|
||||
["wmic", "logicaldisk", "get", "name,VolumeName"]).get(
|
||||
"out", "")
|
||||
devicenamere = re.compile(r"^([A-Z]{1}\:)\s*(\S+)?")
|
||||
for line in result.split("\n"):
|
||||
match = devicenamere.match(line.strip())
|
||||
if not match:
|
||||
continue
|
||||
items.append({
|
||||
"path": match.group(1) + "\\",
|
||||
"name": match.group(2)
|
||||
})
|
||||
return items
|
||||
except WindowsError: # pylint: disable=undefined-variable
|
||||
pass
|
||||
# try "fsutil"
|
||||
result = exec_command(["fsutil", "fsinfo", "drives"]).get("out", "")
|
||||
for device in re.findall(r"[A-Z]:\\", result):
|
||||
items.append({"path": device, "name": None})
|
||||
return items
|
||||
else:
|
||||
result = exec_command(["df"]).get("out")
|
||||
disknamere = re.compile(r"\d+\%\s+([a-z\d\-_/]+)$", flags=re.I)
|
||||
devicenamere = re.compile(r"^/.+\d+\%\s+([a-z\d\-_/]+)$", flags=re.I)
|
||||
for line in result.split("\n"):
|
||||
match = disknamere.search(line.strip())
|
||||
match = devicenamere.match(line.strip())
|
||||
if not match:
|
||||
continue
|
||||
disks.append({
|
||||
"disk": match.group(1),
|
||||
items.append({
|
||||
"path": match.group(1),
|
||||
"name": basename(match.group(1))
|
||||
})
|
||||
return disks
|
||||
return items
|
||||
|
||||
|
||||
def get_mdns_services():
|
||||
try:
|
||||
import zeroconf
|
||||
except ImportError:
|
||||
from site import addsitedir
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
contrib_pysite_dir = get_core_package_dir("contrib-pysite")
|
||||
addsitedir(contrib_pysite_dir)
|
||||
sys.path.insert(0, contrib_pysite_dir)
|
||||
import zeroconf
|
||||
|
||||
class mDNSListener(object):
|
||||
|
||||
def __init__(self):
|
||||
self._zc = zeroconf.Zeroconf(
|
||||
interfaces=zeroconf.InterfaceChoice.All)
|
||||
self._found_types = []
|
||||
self._found_services = []
|
||||
|
||||
def __enter__(self):
|
||||
zeroconf.ServiceBrowser(self._zc, "_services._dns-sd._udp.local.",
|
||||
self)
|
||||
return self
|
||||
|
||||
def __exit__(self, etype, value, traceback):
|
||||
self._zc.close()
|
||||
|
||||
def remove_service(self, zc, type_, name):
|
||||
pass
|
||||
|
||||
def add_service(self, zc, type_, name):
|
||||
try:
|
||||
assert zeroconf.service_type_name(name)
|
||||
assert str(name)
|
||||
except (AssertionError, UnicodeError,
|
||||
zeroconf.BadTypeInNameException):
|
||||
return
|
||||
if name not in self._found_types:
|
||||
self._found_types.append(name)
|
||||
zeroconf.ServiceBrowser(self._zc, name, self)
|
||||
if type_ in self._found_types:
|
||||
s = zc.get_service_info(type_, name)
|
||||
if s:
|
||||
self._found_services.append(s)
|
||||
|
||||
def get_services(self):
|
||||
return self._found_services
|
||||
|
||||
items = []
|
||||
with mDNSListener() as mdns:
|
||||
time.sleep(3)
|
||||
for service in mdns.get_services():
|
||||
properties = None
|
||||
try:
|
||||
if service.properties:
|
||||
json.dumps(service.properties)
|
||||
properties = service.properties
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
|
||||
items.append({
|
||||
"type":
|
||||
service.type,
|
||||
"name":
|
||||
service.name,
|
||||
"ip":
|
||||
".".join([str(ord(c)) for c in service.address]),
|
||||
"port":
|
||||
service.port,
|
||||
"properties":
|
||||
properties
|
||||
})
|
||||
return items
|
||||
|
||||
|
||||
def get_request_defheaders():
|
||||
@@ -426,11 +569,12 @@ def get_request_defheaders():
|
||||
return {"User-Agent": "PlatformIO/%s CI/%d %s" % data}
|
||||
|
||||
|
||||
@memoized
|
||||
@memoized(expire=10000)
|
||||
def _api_request_session():
|
||||
return requests.Session()
|
||||
|
||||
|
||||
@throttle(500)
|
||||
def _get_api_result(
|
||||
url, # pylint: disable=too-many-branches
|
||||
params=None,
|
||||
@@ -440,7 +584,7 @@ def _get_api_result(
|
||||
|
||||
result = None
|
||||
r = None
|
||||
disable_ssl_check = sys.version_info < (2, 7, 9)
|
||||
verify_ssl = sys.version_info >= (2, 7, 9)
|
||||
|
||||
headers = get_request_defheaders()
|
||||
if not url.startswith("http"):
|
||||
@@ -456,15 +600,17 @@ def _get_api_result(
|
||||
data=data,
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=disable_ssl_check)
|
||||
verify=verify_ssl)
|
||||
else:
|
||||
r = _api_request_session().get(url,
|
||||
params=params,
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=disable_ssl_check)
|
||||
r = _api_request_session().get(
|
||||
url,
|
||||
params=params,
|
||||
headers=headers,
|
||||
auth=auth,
|
||||
verify=verify_ssl)
|
||||
result = r.json()
|
||||
r.raise_for_status()
|
||||
return r.text
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if result and "message" in result:
|
||||
raise exception.APIRequestError(result['message'])
|
||||
@@ -473,12 +619,12 @@ def _get_api_result(
|
||||
else:
|
||||
raise exception.APIRequestError(e)
|
||||
except ValueError:
|
||||
raise exception.APIRequestError("Invalid response: %s" %
|
||||
r.text.encode("utf-8"))
|
||||
raise exception.APIRequestError(
|
||||
"Invalid response: %s" % r.text.encode("utf-8"))
|
||||
finally:
|
||||
if r:
|
||||
r.close()
|
||||
return result
|
||||
return None
|
||||
|
||||
|
||||
def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
|
||||
@@ -493,16 +639,18 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
|
||||
if cache_key:
|
||||
result = cc.get(cache_key)
|
||||
if result is not None:
|
||||
return result
|
||||
return json.loads(result)
|
||||
|
||||
# check internet before and resolve issue with 60 seconds timeout
|
||||
internet_on(raise_exception=True)
|
||||
|
||||
result = _get_api_result(url, params, data)
|
||||
if cache_valid:
|
||||
with ContentCache() as cc:
|
||||
cc.set(cache_key, result, cache_valid)
|
||||
return result
|
||||
return json.loads(result)
|
||||
except (requests.exceptions.ConnectionError,
|
||||
requests.exceptions.Timeout) as e:
|
||||
if not internet_on():
|
||||
raise exception.InternetIsOffline()
|
||||
from platformio.maintenance import in_silence
|
||||
total += 1
|
||||
if not in_silence():
|
||||
@@ -510,22 +658,43 @@ def get_api_result(url, params=None, data=None, auth=None, cache_valid=None):
|
||||
"[API] ConnectionError: {0} (incremented retry: max={1}, "
|
||||
"total={2})".format(e, max_retries, total),
|
||||
fg="yellow")
|
||||
sleep(2 * total)
|
||||
time.sleep(2 * total)
|
||||
|
||||
raise exception.APIRequestError(
|
||||
"Could not connect to PlatformIO API Service. "
|
||||
"Please try later.")
|
||||
|
||||
|
||||
def internet_on(timeout=3):
    host = "8.8.8.8"
    port = 53
    try:
        socket.setdefaulttimeout(timeout)
        socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
        return True
    except: # pylint: disable=bare-except
        return False
PING_INTERNET_IPS = [
    "192.30.253.113", # github.com
    "159.122.18.156", # dl.bintray.com
    "193.222.52.25" # dl.platformio.org
]


@memoized(expire=5000)
def _internet_on():
    timeout = 2
    socket.setdefaulttimeout(timeout)
    for ip in PING_INTERNET_IPS:
        try:
            if os.getenv("HTTP_PROXY", os.getenv("HTTPS_PROXY")):
                requests.get(
                    "http://%s" % ip, allow_redirects=False, timeout=timeout)
            else:
                socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((ip,
                                                                           80))
            return True
        except: # pylint: disable=bare-except
            pass
    return False


def internet_on(raise_exception=False):
    result = _internet_on()
    if raise_exception and not result:
        raise exception.InternetIsOffline()
    return result


def get_pythonexe_path():
|
||||
@@ -558,13 +727,92 @@ def where_is_program(program, envpath=None):


def pepver_to_semver(pepver):
    return re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2", pepver, 1)
    return re.sub(r"(\.\d+)\.?(dev|a|b|rc|post)", r"\1-\2.", pepver, 1)


def items_to_list(items):
    if not isinstance(items, list):
        items = [i.strip() for i in items.split(",")]
    return [i.lower() for i in items if i]


def items_in_list(needle, haystack):
    needle = items_to_list(needle)
    haystack = items_to_list(haystack)
    if "*" in needle or "*" in haystack:
        return True
    return set(needle) & set(haystack)


def parse_date(datestr):
    if "T" in datestr and "Z" in datestr:
        return time.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
    return time.strptime(datestr)


def format_filesize(filesize):
    base = 1024
    unit = 0
    suffix = "B"
    filesize = float(filesize)
    if filesize < base:
        return "%d%s" % (filesize, suffix)
    for i, suffix in enumerate("KMGTPEZY"):
        unit = base**(i + 2)
        if filesize >= unit:
            continue
        if filesize % (base**(i + 1)):
            return "%.2f%sB" % ((base * filesize / unit), suffix)
        break
    return "%d%sB" % ((base * filesize / unit), suffix)


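Two of the helpers above are easiest to see by example: pepver_to_semver rewrites a PEP 440 pre-release suffix into semver pre-release notation (the new variant adds a trailing dot so the number becomes a separate identifier), and format_filesize renders byte counts with binary-prefix suffixes. Illustrative values:

```python
# PEP 440 pre-release -> semver pre-release (new behaviour)
print(pepver_to_semver("3.6.0b1"))   # -> "3.6.0-b.1"

# Byte counts rendered with binary suffixes
print(format_filesize(512))          # -> "512B"
print(format_filesize(1536))         # -> "1.50KB"
print(format_filesize(1048576))      # -> "1MB"
```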
def merge_dicts(d1, d2, path=None):
|
||||
if path is None:
|
||||
path = []
|
||||
for key in d2:
|
||||
if (key in d1 and isinstance(d1[key], dict)
|
||||
and isinstance(d2[key], dict)):
|
||||
merge_dicts(d1[key], d2[key], path + [str(key)])
|
||||
else:
|
||||
d1[key] = d2[key]
|
||||
return d1
|
||||
|
||||
|
||||
def rmtree_(path):
|
||||
|
||||
def _onerror(_, name, __):
|
||||
os.chmod(name, stat.S_IWRITE)
|
||||
os.remove(name)
|
||||
try:
|
||||
os.chmod(name, stat.S_IWRITE)
|
||||
os.remove(name)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
click.secho(
|
||||
"Please manually remove file `%s`" % name, fg="red", err=True)
|
||||
raise e
|
||||
|
||||
return rmtree(path, onerror=_onerror)
|
||||
|
||||
|
||||
#
|
||||
# Glob.Escape from Python 3.4
|
||||
# https://github.com/python/cpython/blob/master/Lib/glob.py#L161
|
||||
#
|
||||
|
||||
try:
|
||||
from glob import escape as glob_escape # pylint: disable=unused-import
|
||||
except ImportError:
|
||||
magic_check = re.compile('([*?[])')
|
||||
magic_check_bytes = re.compile(b'([*?[])')
|
||||
|
||||
def glob_escape(pathname):
|
||||
"""Escape all special characters.
|
||||
"""
|
||||
# Escaping is done by wrapping any of "*?[" between square brackets.
|
||||
# Metacharacters do not work in the drive part and shouldn't be
|
||||
# escaped.
|
||||
drive, pathname = os.path.splitdrive(pathname)
|
||||
if isinstance(pathname, bytes):
|
||||
pathname = magic_check_bytes.sub(br'[\1]', pathname)
|
||||
else:
|
||||
pathname = magic_check.sub(r'[\1]', pathname)
|
||||
return drive + pathname
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -14,32 +14,34 @@
|
||||
|
||||
import re
|
||||
from os.path import join
|
||||
from subprocess import check_call
|
||||
from subprocess import CalledProcessError, check_call
|
||||
from sys import modules
|
||||
from urlparse import urlparse
|
||||
|
||||
from platformio import util
|
||||
from platformio.exception import PlatformioException
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
|
||||
|
||||
class VCSClientFactory(object):

    @staticmethod
    def newClient(src_dir, remote_url):
    def newClient(src_dir, remote_url, silent=False):
        result = urlparse(remote_url)
        type_ = result.scheme
        if not type_ and remote_url.startswith("git@"):
        tag = None
        if not type_ and remote_url.startswith("git+"):
            type_ = "git"
            remote_url = remote_url[4:]
        elif "+" in result.scheme:
            type_, _ = result.scheme.split("+", 1)
            remote_url = remote_url[len(type_) + 1:]
        if result.fragment:
            remote_url = remote_url.rsplit("#", 1)[0]
        if "#" in remote_url:
            remote_url, tag = remote_url.rsplit("#", 1)
        if not type_:
            raise PlatformioException("VCS: Unknown repository type %s" %
                                      remote_url)
            raise PlatformioException(
                "VCS: Unknown repository type %s" % remote_url)
        obj = getattr(modules[__name__], "%sClient" % type_.title())(
            src_dir, remote_url, result.fragment)
            src_dir, remote_url, tag, silent)
        assert isinstance(obj, VCSClientBase)
        return obj

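newClient strips the `<vcs>+` prefix from the URL scheme and now splits an optional `#<tag>` fragment off the remote URL before instantiating the matching client class. A sketch of the parsing outcome; the source directory and repository URL are illustrative, and a working `git` client is assumed:

```python
client = VCSClientFactory.newClient(
    "/tmp/pkg",
    "git+https://github.com/platformio/platformio-core.git#v3.5.0")
# -> a GitClient with
#    remote_url == "https://github.com/platformio/platformio-core.git"
#    tag        == "v3.5.0"
```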
@@ -48,18 +50,22 @@ class VCSClientBase(object):
|
||||
|
||||
command = None
|
||||
|
||||
def __init__(self, src_dir, remote_url=None, tag=None):
|
||||
def __init__(self, src_dir, remote_url=None, tag=None, silent=False):
|
||||
self.src_dir = src_dir
|
||||
self.remote_url = remote_url
|
||||
self.tag = tag
|
||||
self.silent = silent
|
||||
self.check_client()
|
||||
|
||||
def check_client(self):
|
||||
try:
|
||||
assert self.command
|
||||
assert self.run_cmd(["--version"])
|
||||
except (AssertionError, OSError):
|
||||
raise PlatformioException(
|
||||
if self.silent:
|
||||
self.get_cmd_output(["--version"])
|
||||
else:
|
||||
assert self.run_cmd(["--version"])
|
||||
except (AssertionError, OSError, PlatformioException):
|
||||
raise UserSideException(
|
||||
"VCS: `%s` client is not installed in your system" %
|
||||
self.command)
|
||||
return True
|
||||
@@ -81,11 +87,19 @@ class VCSClientBase(object):
|
||||
def get_current_revision(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_latest_revision(self):
|
||||
return None if self.can_be_updated else self.get_current_revision()
|
||||
|
||||
def run_cmd(self, args, **kwargs):
|
||||
args = [self.command] + args
|
||||
if "cwd" not in kwargs:
|
||||
kwargs['cwd'] = self.src_dir
|
||||
return check_call(args, **kwargs) == 0
|
||||
try:
|
||||
check_call(args, **kwargs)
|
||||
return True
|
||||
except CalledProcessError as e:
|
||||
raise PlatformioException(
|
||||
"VCS: Could not process command %s" % e.cmd)
|
||||
|
||||
def get_cmd_output(self, args, **kwargs):
|
||||
args = [self.command] + args
|
||||
@@ -95,19 +109,36 @@ class VCSClientBase(object):
|
||||
if result['returncode'] == 0:
|
||||
return result['out'].strip()
|
||||
raise PlatformioException(
|
||||
"VCS: Could not receive an output from `%s` command (%s)" % (
|
||||
args, result))
|
||||
"VCS: Could not receive an output from `%s` command (%s)" %
|
||||
(args, result))
|
||||
|
||||
|
||||
class GitClient(VCSClientBase):
|
||||
|
||||
command = "git"
|
||||
|
||||
def check_client(self):
|
||||
try:
|
||||
return VCSClientBase.check_client(self)
|
||||
except UserSideException:
|
||||
raise UserSideException(
|
||||
"Please install Git client from https://git-scm.com/downloads")
|
||||
|
||||
def get_branches(self):
|
||||
output = self.get_cmd_output(["branch"])
|
||||
output = output.replace("*", "") # fix active branch
|
||||
return [b.strip() for b in output.split("\n")]
|
||||
|
||||
def get_current_branch(self):
|
||||
output = self.get_cmd_output(["branch"])
|
||||
for line in output.split("\n"):
|
||||
line = line.strip()
|
||||
if line.startswith("*"):
|
||||
branch = line[1:].strip()
|
||||
if branch != "(no branch)":
|
||||
return branch
|
||||
return None
|
||||
|
||||
def get_tags(self):
|
||||
output = self.get_cmd_output(["tag", "-l"])
|
||||
return [t.strip() for t in output.split("\n")]
|
||||
@@ -134,12 +165,25 @@ class GitClient(VCSClientBase):
|
||||
return True
|
||||
|
||||
def update(self):
|
||||
args = ["pull"]
|
||||
args = ["pull", "--recurse-submodules"]
|
||||
return self.run_cmd(args)
|
||||
|
||||
def get_current_revision(self):
|
||||
return self.get_cmd_output(["rev-parse", "--short", "HEAD"])
|
||||
|
||||
def get_latest_revision(self):
|
||||
if not self.can_be_updated:
|
||||
return self.get_current_revision()
|
||||
branch = self.get_current_branch()
|
||||
if not branch:
|
||||
return self.get_current_revision()
|
||||
result = self.get_cmd_output(["ls-remote"])
|
||||
for line in result.split("\n"):
|
||||
ref_pos = line.strip().find("refs/heads/" + branch)
|
||||
if ref_pos > 0:
|
||||
return line[:ref_pos].strip()[:7]
|
||||
return None
|
||||
|
||||
|
||||
class HgClient(VCSClientBase):
|
||||
|
||||
@@ -159,6 +203,11 @@ class HgClient(VCSClientBase):
|
||||
def get_current_revision(self):
|
||||
return self.get_cmd_output(["identify", "--id"])
|
||||
|
||||
def get_latest_revision(self):
|
||||
if not self.can_be_updated:
|
||||
return self.get_latest_revision()
|
||||
return self.get_cmd_output(["identify", "--id", self.remote_url])
|
||||
|
||||
|
||||
class SvnClient(VCSClientBase):
|
||||
|
||||
@@ -177,9 +226,8 @@ class SvnClient(VCSClientBase):
|
||||
return self.run_cmd(args)
|
||||
|
||||
def get_current_revision(self):
|
||||
output = self.get_cmd_output([
|
||||
"info", "--non-interactive", "--trust-server-cert", "-r", "HEAD"
|
||||
])
|
||||
output = self.get_cmd_output(
|
||||
["info", "--non-interactive", "--trust-server-cert", "-r", "HEAD"])
|
||||
for line in output.split("\n"):
|
||||
line = line.strip()
|
||||
if line.startswith("Revision:"):
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -12,26 +12,17 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# UDEV Rules for PlatformIO supported boards, http://platformio.org/boards
|
||||
#####################################################################################
|
||||
#
|
||||
# The latest version of this file may be found at:
|
||||
# https://github.com/platformio/platformio-core/blob/develop/scripts/99-platformio-udev.rules
|
||||
# INSTALLATION
|
||||
#
|
||||
# This file must be placed at:
|
||||
# /etc/udev/rules.d/99-platformio-udev.rules (preferred location)
|
||||
# or
|
||||
# /lib/udev/rules.d/99-platformio-udev.rules (req'd on some broken systems)
|
||||
# Please visit > http://docs.platformio.org/en/latest/faq.html#platformio-udev-rules
|
||||
#
|
||||
# To install, type this command in a terminal:
|
||||
# sudo cp 99-platformio-udev.rules /etc/udev/rules.d/99-platformio-udev.rules
|
||||
#####################################################################################
|
||||
|
||||
#
|
||||
# Restart "udev" management tool:
|
||||
# sudo service udev restart
|
||||
# or
|
||||
# sudo udevadm control --reload-rules
|
||||
# sudo udevadm trigger
|
||||
# Boards
|
||||
#
|
||||
# After this file is installed, physically unplug and reconnect your board.
|
||||
|
||||
# CP210X USB UART
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60", MODE:="0666"
|
||||
@@ -62,9 +53,15 @@ KERNEL=="ttyACM*", ATTRS{idVendor}=="16d0", ATTRS{idProduct}=="0753", MODE:="066
|
||||
# STM32 discovery boards, with onboard st/linkv2
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="374?", MODE:="0666"
|
||||
|
||||
# Maple with DFU
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="1eaf", ATTRS{idProduct}=="000[34]", MODE:="0666"
|
||||
|
||||
# USBtiny
|
||||
SUBSYSTEMS=="usb", ATTRS{idProduct}=="0c9f", ATTRS{idVendor}=="1781", MODE="0666"
|
||||
|
||||
# USBasp V2.0
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="05dc", MODE:="0666"
|
||||
|
||||
# Teensy boards
|
||||
ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789]?", ENV{ID_MM_DEVICE_IGNORE}="1"
|
||||
ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789]?", ENV{MTP_NO_PROBE}="1"
|
||||
@@ -77,5 +74,186 @@ SUBSYSTEMS=="usb", ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="0666
|
||||
#TI MSP430 Launchpad
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="0451", ATTRS{idProduct}=="f432", MODE="0666"
|
||||
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
#
|
||||
|
||||
# Black Magic Probe
|
||||
SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic GDB Server"
|
||||
SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic UART Port"
|
||||
|
||||
# opendous and estick
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="204f", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# Original FT232/FT245 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# Original FT2232 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6010", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# Original FT4232 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6011", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# Original FT232H VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6014", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# DISTORTEC JTAG-lock-pick Tiny 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8220", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# TUMPA, TUMPA Lite
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a98", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a99", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# XDS100v2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="a6d0", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# Xverve Signalyzer Tool (DT-USB-ST), Signalyzer LITE (DT-USB-SLITE)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca0", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca1", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# TI/Luminary Stellaris Evaluation Board FTDI (several)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd9", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# TI/Luminary Stellaris In-Circuit Debug Interface FTDI (ICDI) Board
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcda", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# egnite Turtelizer 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bdc8", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# Section5 ICEbear
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c140", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c141", MODE="660", GROUP="plugdev", TAG+="uaccess"
|
||||
|
||||
# Amontec JTAGkey and JTAGkey-tiny
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="cff8", MODE="660", GROUP="plugdev", TAG+="uaccess"

# TI ICDI
ATTRS{idVendor}=="0451", ATTRS{idProduct}=="c32a", MODE="660", GROUP="plugdev", TAG+="uaccess"

# STLink v1
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="3744", MODE="660", GROUP="plugdev", TAG+="uaccess"

# STLink v2
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="3748", MODE="660", GROUP="plugdev", TAG+="uaccess"

# STLink v2-1
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="374b", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Hilscher NXHX Boards
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="0028", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Hitex STR9-comStick
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="002c", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Hitex STM32-PerformanceStick
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="002d", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Altera USB Blaster
ATTRS{idVendor}=="09fb", ATTRS{idProduct}=="6001", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Amontec JTAGkey-HiSpeed
ATTRS{idVendor}=="0fbb", ATTRS{idProduct}=="1000", MODE="660", GROUP="plugdev", TAG+="uaccess"

# SEGGER J-Link
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0101", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0102", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0103", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0104", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0105", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0107", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="0108", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1010", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1011", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1012", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1013", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1014", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1015", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1016", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1017", MODE="660", GROUP="plugdev", TAG+="uaccess"
ATTRS{idVendor}=="1366", ATTRS{idProduct}=="1018", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Raisonance RLink
ATTRS{idVendor}=="138e", ATTRS{idProduct}=="9000", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Debug Board for Neo1973
ATTRS{idVendor}=="1457", ATTRS{idProduct}=="5118", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Olimex ARM-USB-OCD
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="0003", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Olimex ARM-USB-OCD-TINY
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="0004", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Olimex ARM-JTAG-EW
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="001e", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Olimex ARM-USB-OCD-TINY-H
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="002a", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Olimex ARM-USB-OCD-H
ATTRS{idVendor}=="15ba", ATTRS{idProduct}=="002b", MODE="660", GROUP="plugdev", TAG+="uaccess"

# USBprog with OpenOCD firmware
ATTRS{idVendor}=="1781", ATTRS{idProduct}=="0c63", MODE="660", GROUP="plugdev", TAG+="uaccess"

# TI/Luminary Stellaris In-Circuit Debug Interface (ICDI) Board
ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Marvell Sheevaplug
ATTRS{idVendor}=="9e88", ATTRS{idProduct}=="9e8f", MODE="660", GROUP="plugdev", TAG+="uaccess"

# Keil Software, Inc. ULink
ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="660", GROUP="plugdev", TAG+="uaccess"

# CMSIS-DAP compatible adapters
ATTRS{product}=="*CMSIS-DAP*", MODE="664", GROUP="plugdev"
ATTRS{product}=="*CMSIS-DAP*", MODE="660", GROUP="plugdev", TAG+="uaccess"

# SEGGER J-Link
ATTR{idProduct}=="1001", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1002", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1003", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1004", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1005", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1006", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1007", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1008", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1009", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100a", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100b", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100c", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100d", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100e", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="100f", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1010", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1011", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1012", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1013", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1014", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1015", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1016", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1017", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1018", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1019", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101a", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101b", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101c", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101d", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101e", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="101f", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1020", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1021", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1022", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1023", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1024", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1025", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1026", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1027", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1028", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="1029", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102a", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102b", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102c", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102d", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102e", ATTR{idVendor}=="1366", MODE="666"
ATTR{idProduct}=="102f", ATTR{idVendor}=="1366", MODE="666"

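The rules above only take effect once they are copied into udev's rules directory and udev reloads them. A minimal sketch of that step follows; it assumes the rules have been saved locally as `99-platformio-udev.rules`, that `udevadm` is on the PATH, and that it runs with root privileges (none of this is part of the diff itself).

    # Hypothetical helper: install the udev rules file and reload udev.
    # Filenames and paths are assumptions for illustration only.
    import shutil
    import subprocess

    def install_udev_rules(src="99-platformio-udev.rules",
                           dst="/etc/udev/rules.d/99-platformio-udev.rules"):
        shutil.copy(src, dst)  # copy the rules file into place
        subprocess.check_call(["udevadm", "control", "--reload-rules"])
        subprocess.check_call(["udevadm", "trigger"])  # re-apply rules to attached devices

    if __name__ == "__main__":
        install_udev_rules()

After reloading, the adapter usually has to be re-plugged (or `udevadm trigger` run, as above) before the new permissions apply.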
@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from math import ceil
from os.path import dirname, isfile, join, realpath
import os
import urlparse
from os.path import dirname, isdir, isfile, join, realpath
from sys import exit as sys_exit
from sys import path

@@ -36,49 +37,152 @@ def is_compat_platform_and_framework(platform, framework):
return False

def generate_boards(boards):
def campaign_url(url, source="platformio", medium="docs"):
data = urlparse.urlparse(url)
query = data.query
if query:
query += "&"
query += "utm_source=%s&utm_medium=%s" % (source, medium)
return urlparse.urlunparse(
urlparse.ParseResult(data.scheme, data.netloc, data.path, data.params,
query, data.fragment))

def _round_memory_size(size):
if size == 1:
return 1

size = ceil(size)
for b in (64, 32, 16, 8, 4, 2, 1):
if b < size:
return int(ceil(size / b) * b)
assert NotImplemented()

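To make the intent of `campaign_url` above concrete, here is a small Python 3 sketch of the same idea (the script itself uses Python 2's `urlparse` module); the example URL and output are illustrative only.

    # Illustrative Python 3 equivalent of campaign_url(); not part of the diff above.
    from urllib.parse import urlparse, urlunparse

    def campaign_url(url, source="platformio", medium="docs"):
        data = urlparse(url)
        query = data.query
        if query:
            query += "&"
        query += "utm_source=%s&utm_medium=%s" % (source, medium)
        return urlunparse(data._replace(query=query))

    print(campaign_url("https://docs.platformio.org/page/plus/debugging.html"))
    # https://docs.platformio.org/page/plus/debugging.html?utm_source=platformio&utm_medium=docs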
def generate_boards(boards, extend_debug=False, skip_columns=None):
columns = [
("ID", "``{id}``"),
("Name", "`{name} <{url}>`_"),
("Platform", ":ref:`{platform_title} <platform_{platform}>`"),
("Debug", "{debug}"),
("MCU", "{mcu}"),
("Frequency", "{f_cpu:d}MHz"),
("Flash", "{rom}"),
("RAM", "{ram}"),
]
platforms = {m['name']: m['title'] for m in PLATFORM_MANIFESTS}
lines = []

lines.append("""
.. list-table::
    :header-rows: 1
""")

    * - ID
      - Name
      - Microcontroller
      - Frequency
      - Flash
      - RAM""")
# add header
for (name, template) in columns:
if skip_columns and name in skip_columns:
continue
prefix = "    * - " if name == "ID" else "      - "
lines.append(prefix + name)

for data in sorted(boards, key=lambda item: item['id']):
board_ram = float(data['ram']) / 1024
lines.append("""
    * - ``{id}``
      - `{name} <{url}>`_
      - {mcu}
      - {f_cpu:d} MHz
      - {rom} Kb
      - {ram} Kb""".format(
debug = [":ref:`Yes <piodebug>`" if data['debug'] else "No"]
if extend_debug and data['debug']:
debug_onboard = []
debug_external = []
for name, options in data['debug']['tools'].items():
attrs = []
if options.get("default"):
attrs.append("default")
if options.get("onboard"):
attrs.append("on-board")
tool = ":ref:`debugging_tool_%s`" % name
if attrs:
tool = "%s (%s)" % (tool, ", ".join(attrs))
if options.get("onboard"):
debug_onboard.append(tool)
else:
debug_external.append(tool)
debug = sorted(debug_onboard) + sorted(debug_external)

variables = dict(
id=data['id'],
name=data['name'],
url=data['url'],
platform=data['platform'],
platform_title=platforms[data['platform']],
debug=", ".join(debug),
url=campaign_url(data['url']),
mcu=data['mcu'].upper(),
f_cpu=int(data['fcpu']) / 1000000,
ram=int(board_ram) if board_ram % 1 == 0 else board_ram,
rom=_round_memory_size(data['rom'] / 1024)))
ram=util.format_filesize(data['ram']),
rom=util.format_filesize(data['rom']))

return "\n".join(lines + [""])
for (name, template) in columns:
if skip_columns and name in skip_columns:
continue
prefix = "    * - " if name == "ID" else "      - "
lines.append(prefix + template.format(**variables))

if lines:
lines.append("")

return lines

def generate_debug_contents(boards, skip_board_columns=None, extra_rst=None):
lines = []
onboard_debug = [
b for b in boards if b['debug'] and any(
t.get("onboard") for (_, t) in b['debug']['tools'].items())
]
external_debug = [
b for b in boards if b['debug'] and b not in onboard_debug
]
if not onboard_debug and not external_debug:
return lines

lines.append("""
Debugging
---------

:ref:`piodebug` - "1-click" solution for debugging with a zero configuration.

.. contents::
    :local:
""")
if extra_rst:
lines.append(".. include:: %s" % extra_rst)

lines.append("""
Debug Tools
~~~~~~~~~~~

Supported debugging tools are listed in "Debug" column. For more detailed
information, please scroll table by horizontal.
You can switch between debugging :ref:`debugging_tools` using
:ref:`projectconf_debug_tool` options.

.. warning::
    You will need to install debug tool drivers depending on your system.
    Please click on compatible debug tool below for the further instructions.
""")

if onboard_debug:
lines.append("""
On-Board Debug Tools
^^^^^^^^^^^^^^^^^^^^

Boards listed below have on-board debug tool and **ARE READY** for debugging!
You do not need to use/buy external debug tool.
""")
lines.extend(
generate_boards(
onboard_debug,
extend_debug=True,
skip_columns=skip_board_columns))
if external_debug:
lines.append("""
External Debug Tools
^^^^^^^^^^^^^^^^^^^^

Boards listed below are compatible with :ref:`piodebug` but **DEPEND ON**
external debug tool. See "Debug" column for compatible debug tools.
""")
lines.extend(
generate_boards(
external_debug,
extend_debug=True,
skip_columns=skip_board_columns))
return lines

def generate_packages(platform, packagenames, is_embedded):

@@ -93,56 +197,59 @@ Packages
    :header-rows: 1

    * - Name
      - Contents""")
      - Description""")
for name in sorted(packagenames):
assert name in API_PACKAGES, name
contitems = [
"`{name} <{url}>`_".format(**item) for item in API_PACKAGES[name]
]
lines.append("""
    * - ``{name}``
      - {contents}""".format(
name=name, contents=", ".join(contitems)))
    * - `{name} <{url}>`__
      - {description}""".format(
name=name,
url=campaign_url(API_PACKAGES[name]['url']),
description=API_PACKAGES[name]['description']))

if is_embedded:
lines.append("""
.. warning::
    **Linux Users**:

    * Ubuntu/Debian users may need to add own "username" to the "dialout"
      group if they are not "root", doing this issuing a
      ``sudo usermod -a -G dialout yourusername``.
    * Install "udev" rules file `99-platformio-udev.rules <https://github.com/platformio/platformio-core/blob/develop/scripts/99-platformio-udev.rules>`_
      (an instruction is located in the file).
    * Raspberry Pi users, please read this article
      `Enable serial port on Raspberry Pi <https://hallard.me/enable-serial-port-on-raspberry-pi/>`__.
    * Install "udev" rules :ref:`faq_udev_rules`
    * Raspberry Pi users, please read this article
      `Enable serial port on Raspberry Pi <https://hallard.me/enable-serial-port-on-raspberry-pi/>`__.
""")

if platform == "teensy":
lines.append("""
    **Windows Users:** Teensy programming uses only Windows built-in HID
    drivers. When Teensy is programmed to act as a USB Serial device,
    Windows XP, Vista, 7 and 8 require `this serial driver
    <http://www.pjrc.com/teensy/serial_install.exe>`_
    is needed to access the COM port your program uses. No special driver
    installation is necessary on Windows 10.
    **Windows Users:**

    Teensy programming uses only Windows built-in HID
    drivers. When Teensy is programmed to act as a USB Serial device,
    Windows XP, Vista, 7 and 8 require `this serial driver
    <http://www.pjrc.com/teensy/serial_install.exe>`_
    is needed to access the COM port your program uses. No special driver
    installation is necessary on Windows 10.
""")
else:
lines.append("""
    **Windows Users:** Please check that you have correctly installed USB
    driver from board manufacturer
    **Windows Users:**

    Please check that you have a correctly installed USB driver from board
    manufacturer
""")

return "\n".join(lines)

def generate_platform(name):
def generate_platform(name, rst_dir):
print "Processing platform: %s" % name

compatible_boards = [
board for board in BOARDS if name in board['platform']
]

lines = []

lines.append(
""".. Copyright 2014-present PlatformIO <contact@platformio.org>
""".. Copyright (c) 2014-present PlatformIO <contact@platformio.org>
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

@@ -153,21 +260,96 @@ def generate_platform(name):
    See the License for the specific language governing permissions and
    limitations under the License.
""")
p = PlatformFactory.newPlatform(name)
assert p.repository_url.endswith(".git")
github_url = p.repository_url[:-4]

lines.append(".. _platform_%s:" % name)
lines.append(".. _platform_%s:" % p.name)
lines.append("")

_title = "Platform ``%s``" % name
lines.append(_title)
lines.append("=" * len(_title))

p = PlatformFactory.newPlatform(name)
lines.append(p.title)
lines.append("=" * len(p.title))
lines.append(":ref:`projectconf_env_platform` = ``%s``" % p.name)
lines.append("")
lines.append(p.description)
lines.append("""
For more detailed information please visit `vendor site <%s>`_.""" %
p.vendor_url)
campaign_url(p.vendor_url))
lines.append("""
.. contents::""")
.. contents:: Contents
    :local:
    :depth: 1
""")

#
# Extra
#
if isfile(join(rst_dir, "%s_extra.rst" % name)):
lines.append(".. include:: %s_extra.rst" % p.name)

#
# Examples
#
lines.append("""
Examples
--------

Examples are listed from `%s development platform repository <%s>`_:
""" % (p.title, campaign_url("%s/tree/master/examples" % github_url)))
examples_dir = join(p.get_dir(), "examples")
if isdir(examples_dir):
for eitem in os.listdir(examples_dir):
if not isdir(join(examples_dir, eitem)):
continue
url = "%s/tree/master/examples/%s" % (github_url, eitem)
lines.append("* `%s <%s>`_" % (eitem, campaign_url(url)))

#
# Debugging
#
if compatible_boards:
lines.extend(
generate_debug_contents(
compatible_boards,
skip_board_columns=["Platform"],
extra_rst="%s_debug.rst" % name
if isfile(join(rst_dir, "%s_debug.rst" % name)) else None))

#
# Development version of dev/platform
#
lines.append("""
Stable and upstream versions
----------------------------

You can switch between `stable releases <{github_url}/releases>`__
of {title} development platform and the latest upstream version using
:ref:`projectconf_env_platform` option in :ref:`projectconf` as described below.

Stable
~~~~~~

.. code-block:: ini

    ; Latest stable version
    [env:latest_stable]
    platform = {name}
    board = ...

    ; Custom stable version
    [env:custom_stable]
    platform = {name}@x.y.z
    board = ...

Upstream
~~~~~~~~

.. code-block:: ini

    [env:upstream_develop]
    platform = {github_url}.git
    board = ...
""".format(name=p.name, title=p.title, github_url=github_url))

#
# Packages
#

@@ -202,31 +384,28 @@ Frameworks
#
# Boards
#
vendors = {}
for board in BOARDS:
vendor = board['vendor']
if name in board['platform']:
if vendor in vendors:
vendors[vendor].append(board)
else:
vendors[vendor] = [board]
if compatible_boards:
vendors = {}
for board in compatible_boards:
if board['vendor'] not in vendors:
vendors[board['vendor']] = []
vendors[board['vendor']].append(board)

if vendors:
lines.append("""
Boards
------

.. note::
    * You can list pre-configured boards by :ref:`cmd_boards` command or
      `PlatformIO Boards Explorer <http://platformio.org/boards>`_
      `PlatformIO Boards Explorer <https://platformio.org/boards>`_
    * For more detailed ``board`` information please scroll tables below by
      horizontal.
""")

for vendor, boards in sorted(vendors.iteritems()):
lines.append(str(vendor))
lines.append("~" * len(vendor))
lines.append(generate_boards(boards))
for vendor, boards in sorted(vendors.items()):
lines.append(str(vendor))
lines.append("~" * len(vendor))
lines.extend(generate_boards(boards, skip_columns=["Platform"]))

return "\n".join(lines)

@@ -238,17 +417,24 @@ def update_platform_docs():
        dirname(realpath(__file__)), "..", "docs", "platforms")
    rst_path = join(platforms_dir, "%s.rst" % name)
    with open(rst_path, "w") as f:
        f.write(generate_platform(name))
        if isfile(join(platforms_dir, "%s_extra.rst" % name)):
            f.write("\n.. include:: %s_extra.rst\n" % name)
        f.write(generate_platform(name, platforms_dir))

def generate_framework(type_, data):
def generate_framework(type_, data, rst_dir=None):
print "Processing framework: %s" % type_

compatible_platforms = [
m for m in PLATFORM_MANIFESTS
if is_compat_platform_and_framework(m['name'], type_)
]
compatible_boards = [
board for board in BOARDS if type_ in board['frameworks']
]

lines = []

lines.append(
""".. Copyright 2014-present PlatformIO <contact@platformio.org>
""".. Copyright (c) 2014-present PlatformIO <contact@platformio.org>
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

@@ -263,17 +449,50 @@ def generate_framework(type_, data):
lines.append(".. _framework_%s:" % type_)
lines.append("")

_title = "Framework ``%s``" % type_
lines.append(_title)
lines.append("=" * len(_title))
lines.append(data['title'])
lines.append("=" * len(data['title']))
lines.append(":ref:`projectconf_env_framework` = ``%s``" % type_)
lines.append("")
lines.append(data['description'])
lines.append("""
For more detailed information please visit `vendor site <%s>`_.
""" % data['url'])

lines.append(".. contents::")
""" % campaign_url(data['url']))

lines.append("""
.. contents:: Contents
    :local:
    :depth: 1""")

# Extra
if isfile(join(rst_dir, "%s_extra.rst" % type_)):
lines.append(".. include:: %s_extra.rst" % type_)

#
# Debugging
#
if compatible_boards:
lines.extend(
generate_debug_contents(
compatible_boards,
extra_rst="%s_debug.rst" % type_
if isfile(join(rst_dir, "%s_debug.rst" % type_)) else None))

if compatible_platforms:
# examples
lines.append("""
Examples
--------
""")
for manifest in compatible_platforms:
p = PlatformFactory.newPlatform(manifest['name'])
lines.append(
"* `%s for %s <%s>`_" %
(data['title'], manifest['title'],
campaign_url(
"%s/tree/master/examples" % p.repository_url[:-4])))

# Platforms
lines.append("""
Platforms
---------
.. list-table::

@@ -282,42 +501,35 @@ Platforms
    * - Name
      - Description""")

_found_platform = False
for manifest in PLATFORM_MANIFESTS:
if not is_compat_platform_and_framework(manifest['name'], type_):
continue
_found_platform = True
p = PlatformFactory.newPlatform(manifest['name'])
lines.append("""
for manifest in compatible_platforms:
p = PlatformFactory.newPlatform(manifest['name'])
lines.append("""
    * - :ref:`platform_{type_}`
      - {description}""".format(
type_=manifest['name'], description=p.description))
if not _found_platform:
del lines[-1]
type_=manifest['name'], description=p.description))

lines.append("""
#
# Boards
#
if compatible_boards:
vendors = {}
for board in compatible_boards:
if board['vendor'] not in vendors:
vendors[board['vendor']] = []
vendors[board['vendor']].append(board)
lines.append("""
Boards
------

.. note::
    * You can list pre-configured boards by :ref:`cmd_boards` command or
      `PlatformIO Boards Explorer <http://platformio.org/boards>`_
      `PlatformIO Boards Explorer <https://platformio.org/boards>`_
    * For more detailed ``board`` information please scroll tables below by horizontal.
""")

vendors = {}
for data in BOARDS:
frameworks = data['frameworks']
vendor = data['vendor']
if type_ in frameworks:
if vendor in vendors:
vendors[vendor].append(data)
else:
vendors[vendor] = [data]
for vendor, boards in sorted(vendors.iteritems()):
lines.append(str(vendor))
lines.append("~" * len(vendor))
lines.append(generate_boards(boards))
for vendor, boards in sorted(vendors.items()):
lines.append(str(vendor))
lines.append("~" * len(vendor))
lines.extend(generate_boards(boards))
return "\n".join(lines)

@@ -328,48 +540,14 @@ def update_framework_docs():
        dirname(realpath(__file__)), "..", "docs", "frameworks")
    rst_path = join(frameworks_dir, "%s.rst" % name)
    with open(rst_path, "w") as f:
        f.write(generate_framework(name, framework))
        if isfile(join(frameworks_dir, "%s_extra.rst" % name)):
            f.write("\n.. include:: %s_extra.rst\n" % name)

def update_create_platform_doc():
    lines = []
    lines.append(""".. _platform_creating_packages:

Packages
--------

*PlatformIO* has pre-built packages for the most popular operation systems:
*Mac OS*, *Linux (+ARM)* and *Windows*.

.. list-table::
    :header-rows: 1

    * - Name
      - Contents""")
    for name, items in sorted(API_PACKAGES.iteritems()):
        contitems = ["`{name} <{url}>`_".format(**item) for item in items]
        lines.append("""
    * - ``{name}``
      - {contents}""".format(
            name=name, contents=", ".join(contitems)))

    with open(
            join(util.get_source_dir(), "..", "docs", "platforms",
                 "creating_platform.rst"), "r+") as fp:
        content = fp.read()
        fp.seek(0, 0)
        fp.write(content[:content.index(".. _platform_creating_packages:")] +
                 "\n".join(lines) + "\n\n" + content[content.index(
                     ".. _platform_creating_manifest_file:"):])
        f.write(generate_framework(name, framework, frameworks_dir))

def update_embedded_boards():
    lines = []

    lines.append(
        """.. Copyright 2014-present PlatformIO <contact@platformio.org>
        """.. Copyright (c) 2014-present PlatformIO <contact@platformio.org>
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

@@ -392,14 +570,16 @@ Rapid Embedded Development, Continuous and IDE integration in a few
steps with PlatformIO thanks to built-in project generator for the most
popular embedded boards and IDE.

* You can list pre-configured boards using :ref:`cmd_boards` command or
  `PlatformIO Boards Explorer <http://platformio.org/boards>`_
* For more detailed ``board`` information please scroll tables below by
  horizontal.
.. note::
    * You can list pre-configured boards by :ref:`cmd_boards` command or
      `PlatformIO Boards Explorer <https://platformio.org/boards>`_
    * For more detailed ``board`` information please scroll tables below by horizontal.
""")

    lines.append(".. contents::")
    lines.append("")
    lines.append("""
.. contents:: Vendors
    :local:
""")

    vendors = {}
    for data in BOARDS:

@@ -412,7 +592,7 @@ popular embedded boards and IDE.
    for vendor, boards in sorted(vendors.iteritems()):
        lines.append(str(vendor))
        lines.append("~" * len(vendor))
        lines.append(generate_boards(boards))
        lines.extend(generate_boards(boards))

    emboards_rst = join(
        dirname(realpath(__file__)), "..", "docs", "platforms",

@@ -421,11 +601,234 @@ popular embedded boards and IDE.
        f.write("\n".join(lines))

def update_debugging():
    tools_to_platforms = {}
    vendors = {}
    platforms = []
    frameworks = []
    for data in BOARDS:
        if not data['debug']:
            continue

        for tool in data['debug']['tools']:
            tool = str(tool)
            if tool not in tools_to_platforms:
                tools_to_platforms[tool] = []
            tools_to_platforms[tool].append(data['platform'])

        platforms.append(data['platform'])
        frameworks.extend(data['frameworks'])
        vendor = data['vendor']
        if vendor in vendors:
            vendors[vendor].append(data)
        else:
            vendors[vendor] = [data]

    def _update_tool_compat_platforms(content):
        begin_tpl = ".. begin_compatible_platforms_"
        end_tpl = ".. end_compatible_platforms_"
        for tool, platforms in tools_to_platforms.items():
            begin = begin_tpl + tool
            end = end_tpl + tool
            begin_index = content.index(begin)
            end_index = content.index(end)
            chunk = ["\n\n:Compatible Platforms:\n"]
            chunk.extend([
                "    * :ref:`platform_%s`" % str(p)
                for p in sorted(set(platforms))
            ])
            chunk.extend(["\n"])
            content = content[:begin_index + len(begin)] + "\n".join(
                chunk) + content[end_index:]
        return content

    lines = []
    # Platforms
    lines.append(""".. _debugging_platforms:

Platforms
---------
.. list-table::
    :header-rows: 1

    * - Name
      - Description""")

    for manifest in PLATFORM_MANIFESTS:
        if manifest['name'] not in platforms:
            continue
        p = PlatformFactory.newPlatform(manifest['name'])
        lines.append("""
    * - :ref:`platform_{type_}`
      - {description}""".format(
            type_=manifest['name'], description=p.description))

    # Frameworks
    lines.append("""
Frameworks
----------
.. list-table::
    :header-rows: 1

    * - Name
      - Description""")
    for framework in API_FRAMEWORKS:
        if framework['name'] not in frameworks:
            continue
        lines.append("""
    * - :ref:`framework_{name}`
      - {description}""".format(**framework))

    # Boards
    lines.append("""
Boards
------

.. note::
    For more detailed ``board`` information please scroll tables below by horizontal.
""")
    for vendor, boards in sorted(vendors.iteritems()):
        lines.append(str(vendor))
        lines.append("~" * len(vendor))
        lines.extend(generate_boards(boards, extend_debug=True))

    with open(
            join(util.get_source_dir(), "..", "docs", "plus", "debugging.rst"),
            "r+") as fp:
        content = _update_tool_compat_platforms(fp.read())
        fp.seek(0)
        fp.truncate()
        fp.write(content[:content.index(".. _debugging_platforms:")] +
                 "\n".join(lines))

def update_project_examples():
    platform_readme_tpl = """
# {title}: development platform for [PlatformIO](https://platformio.org)

{description}

* [Home](https://platformio.org/platforms/{name}) (home page in PlatformIO Registry)
* [Documentation](http://docs.platformio.org/page/platforms/{name}.html) (advanced usage, packages, boards, frameworks, etc.)

# Examples

{examples}
"""
    framework_readme_tpl = """
# {title}: framework for [PlatformIO](https://platformio.org)

{description}

* [Home](https://platformio.org/frameworks/{name}) (home page in PlatformIO Registry)
* [Documentation](http://docs.platformio.org/page/frameworks/{name}.html)

# Examples

{examples}
"""

    project_examples_dir = join(util.get_source_dir(), "..", "examples")
    framework_examples_md_lines = {}
    embedded = []
    desktop = []

    for manifest in PLATFORM_MANIFESTS:
        p = PlatformFactory.newPlatform(manifest['name'])
        github_url = p.repository_url[:-4]

        # Platform README
        platform_examples_dir = join(p.get_dir(), "examples")
        examples_md_lines = []
        if isdir(platform_examples_dir):
            for item in os.listdir(platform_examples_dir):
                if not isdir(join(platform_examples_dir, item)):
                    continue
                url = "%s/tree/master/examples/%s" % (github_url, item)
                examples_md_lines.append("* [%s](%s)" % (item, url))

        readme_dir = join(project_examples_dir, "platforms", p.name)
        if not isdir(readme_dir):
            os.makedirs(readme_dir)
        with open(join(readme_dir, "README.md"), "w") as fp:
            fp.write(
                platform_readme_tpl.format(
                    name=p.name,
                    title=p.title,
                    description=p.description,
                    examples="\n".join(examples_md_lines)))

        # Framework README
        for framework in API_FRAMEWORKS:
            if not is_compat_platform_and_framework(p.name, framework['name']):
                continue
            if framework['name'] not in framework_examples_md_lines:
                framework_examples_md_lines[framework['name']] = []
            lines = []
            lines.append("- [%s](%s)" % (p.title, github_url))
            lines.extend("  %s" % l for l in examples_md_lines)
            lines.append("")
            framework_examples_md_lines[framework['name']].extend(lines)

        # Root README
        line = "* [%s](%s)" % (p.title, "%s/tree/master/examples" % github_url)
        if p.is_embedded():
            embedded.append(line)
        else:
            desktop.append(line)

    # Frameworks
    frameworks = []
    for framework in API_FRAMEWORKS:
        readme_dir = join(project_examples_dir, "frameworks",
                          framework['name'])
        if not isdir(readme_dir):
            os.makedirs(readme_dir)
        with open(join(readme_dir, "README.md"), "w") as fp:
            fp.write(
                framework_readme_tpl.format(
                    name=framework['name'],
                    title=framework['title'],
                    description=framework['description'],
                    examples="\n".join(
                        framework_examples_md_lines[framework['name']])))
        url = campaign_url(
            "http://docs.platformio.org/en/latest/frameworks/%s.html#examples"
            % framework['name'],
            source="github",
            medium="examples")
        frameworks.append("* [%s](%s)" % (framework['title'], url))

    with open(join(project_examples_dir, "README.md"), "w") as fp:
        fp.write("""# PlatformIO Project Examples

- [Development platforms](#development-platforms):
  - [Embedded](#embedded)
  - [Desktop](#desktop)
- [Frameworks](#frameworks)

## Development platforms

### Embedded

%s

### Desktop

%s

## Frameworks

%s
""" % ("\n".join(embedded), "\n".join(desktop), "\n".join(frameworks)))

def main():
    update_create_platform_doc()
    update_platform_docs()
    update_framework_docs()
    update_embedded_boards()
    update_debugging()
    update_project_examples()

if __name__ == "__main__":

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -14,6 +14,7 @@
import os
import subprocess
import site
import sys
from platform import system
from tempfile import NamedTemporaryFile

@@ -26,39 +27,34 @@ def fix_winpython_pathenv():
    """
    Add Python & Python Scripts to the search path on Windows
    """
    import ctypes
    from ctypes.wintypes import HWND, UINT, WPARAM, LPARAM, LPVOID
    try:
        import _winreg as winreg
    except ImportError:
        import winreg

    # took these lines from the native "win_add2path.py"
    pythonpath = os.path.dirname(CURINTERPRETER_PATH)
    pythonpath = os.path.dirname(os.path.normpath(sys.executable))
    scripts = os.path.join(pythonpath, "Scripts")
    if not os.path.isdir(scripts):
        os.makedirs(scripts)
    appdata = os.environ["APPDATA"]
    if hasattr(site, "USER_SITE"):
        userpath = site.USER_SITE.replace(appdata, "%APPDATA%")
        userscripts = os.path.join(userpath, "Scripts")
    else:
        userscripts = None

    with winreg.CreateKey(winreg.HKEY_CURRENT_USER, u"Environment") as key:
    with winreg.CreateKey(winreg.HKEY_CURRENT_USER, "Environment") as key:
        try:
            envpath = winreg.QueryValueEx(key, u"PATH")[0]
            envpath = winreg.QueryValueEx(key, "PATH")[0]
        except WindowsError:
            envpath = u"%PATH%"

        paths = [envpath]
        for path in (pythonpath, scripts):
        for path in (pythonpath, scripts, userscripts):
            if path and path not in envpath and os.path.isdir(path):
                paths.append(path)

        envpath = os.pathsep.join(paths)
        winreg.SetValueEx(key, u"PATH", 0, winreg.REG_EXPAND_SZ, envpath)
        winreg.ExpandEnvironmentStrings(envpath)

        # notify the system about the changes
        SendMessage = ctypes.windll.user32.SendMessageW
        SendMessage.argtypes = HWND, UINT, WPARAM, LPVOID
        SendMessage.restype = LPARAM
        SendMessage(0xFFFF, 0x1A, 0, u"Environment")
        winreg.SetValueEx(key, "PATH", 0, winreg.REG_EXPAND_SZ, envpath)
        return True

@@ -92,6 +88,10 @@ def exec_python_cmd(args):

def install_pip():
    r = exec_python_cmd(["-m", "pip", "--version"])
    if r['returncode'] == 0:
        print r['out']
        return
    try:
        from urllib2 import urlopen
    except ImportError:

@@ -112,27 +112,25 @@ def install_pip():

def install_platformio():
    r = None
    cmd = ["-m", "pip.__main__" if sys.version_info < (2, 7, 0) else "pip"]
    cmd = ["-m", "pip", "install", "-U", "platformio"]
    # cmd = [
    #     "-m", "pip", "install", "-U",
    #     "https://github.com/platformio/platformio-core/archive/develop.zip"
    # ]
    try:
        # r = exec_python_cmd(cmd + ["install", "-U", "platformio"])
        r = exec_python_cmd(cmd + [
            "install", "-U",
            "https://github.com/platformio/platformio-core/archive/develop.zip"
        ])
        r = exec_python_cmd(cmd)
        assert r['returncode'] == 0
    except AssertionError:
        r = exec_python_cmd(cmd + ["--no-cache-dir", "install", "-U",
                                   "platformio"])
        cmd.insert(2, "--no-cache-dir")
        r = exec_python_cmd(cmd)
    if r:
        print_exec_result(r)

def main():
    steps = [
        ("Fixing Windows %PATH% Environment", fix_winpython_pathenv),
        ("Installing Python Package Manager", install_pip),
        ("Installing PlatformIO and dependencies", install_platformio)
    ]
    steps = [("Fixing Windows %PATH% Environment", fix_winpython_pathenv),
             ("Installing Python Package Manager", install_pip),
             ("Installing PlatformIO and dependencies", install_platformio)]

    if not IS_WINDOWS:
        del steps[0]

@@ -161,7 +159,7 @@ Permission denied
You need the `sudo` permission to install Python packages. Try

$ sudo python -c "$(curl -fsSL
https://raw.githubusercontent.com/platformio/platformio/master/scripts/get-platformio.py)"
https://raw.githubusercontent.com/platformio/platformio/develop/scripts/get-platformio.py)"
""")

    if is_error:
scripts/install_devplatforms.py (new file)
@@ -0,0 +1,32 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import subprocess
import sys

def main():
    platforms = json.loads(
        subprocess.check_output(
            ["platformio", "platform", "search", "--json-output"]))
    for platform in platforms:
        if platform['forDesktop']:
            continue
        subprocess.check_call(
            ["platformio", "platform", "install", platform['repository']])

if __name__ == "__main__":
    sys.exit(main())
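As a quick illustration of what the new helper above does: it reads `platformio platform search --json-output` and installs every platform that is not desktop-only. The sketch below applies the same filtering rule to a hand-written sample payload; only the `forDesktop` and `repository` keys come from the script itself, the sample values are invented for illustration.

    # Illustrative sketch of the filtering rule in install_devplatforms.py.
    # The sample records are made up; only the keys mirror the script above.
    sample = [
        {"name": "atmelavr", "forDesktop": False,
         "repository": "https://github.com/platformio/platform-atmelavr.git"},
        {"name": "native", "forDesktop": True,
         "repository": "https://github.com/platformio/platform-native.git"},
    ]
    to_install = [p["repository"] for p in sample if not p["forDesktop"]]
    print(to_install)  # only the embedded platform's repository is kept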
setup.py
@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -20,11 +20,10 @@ from platformio import (__author__, __description__, __email__, __license__,
install_requires = [
    "bottle<0.13",
    "click>=5,<6",
    "lockfile>=0.9.1,<0.13",
    "requests>=2.4.0,<3",
    "semantic_version>=2.5.0",
    "colorama",
    "pyserial>=3,<4"
    "pyserial>=3,<4,!=3.3",
    "requests>=2.4.0,<3",
    "semantic_version>=2.5.0,<3"
]

setup(

@@ -36,6 +35,7 @@ setup(
    author_email=__email__,
    url=__url__,
    license=__license__,
    python_requires='>=2.7, <3',
    install_requires=install_requires,
    packages=find_packages(),
    package_data={

@@ -50,6 +50,7 @@ setup(
    entry_points={
        "console_scripts": [
            "pio = platformio.__main__:main",
            "piodebuggdb = platformio.__main__:debug_gdb_main",
            "platformio = platformio.__main__:main"
        ]
    },

@@ -66,9 +67,8 @@ setup(
        "Topic :: Software Development :: Compilers"
    ],
    keywords=[
        "iot", "ide", "build", "compile", "library manager",
        "embedded", "ci", "continuous integration", "arduino", "mbed",
        "esp8266", "framework", "ide", "ide integration", "library.json",
        "make", "cmake", "makefile", "mk", "pic32", "fpga"
    ]
)
        "iot", "embedded", "arduino", "mbed", "esp8266", "esp32", "fpga",
        "firmware", "continuous-integration", "cloud-ide", "avr", "arm",
        "ide", "unit-testing", "hardware", "verilog", "microcontroller",
        "debug"
    ])

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -15,6 +15,7 @@
from os.path import join

from platformio.commands.ci import cli as cmd_ci
from platformio.commands.lib import cli as cmd_lib

def test_ci_empty(clirunner):

@@ -25,29 +26,31 @@ def test_ci_empty(clirunner):

def test_ci_boards(clirunner, validate_cliresult):
    result = clirunner.invoke(cmd_ci, [
        join("examples", "atmelavr-and-arduino", "arduino-internal-libs",
             "src", "ChatServer.ino"), "-b", "uno", "-b", "leonardo"
        join("examples", "wiring-blink", "src", "main.cpp"), "-b", "uno", "-b",
        "leonardo"
    ])
    validate_cliresult(result)

def test_ci_project_conf(clirunner, validate_cliresult):
    project_dir = join("examples", "atmelavr-and-arduino",
                       "arduino-internal-libs")
    project_dir = join("examples", "wiring-blink")
    result = clirunner.invoke(cmd_ci, [
        join(project_dir, "src", "ChatServer.ino"), "--project-conf",
        join(project_dir, "src", "main.cpp"), "--project-conf",
        join(project_dir, "platformio.ini")
    ])
    validate_cliresult(result)
    assert all([s in result.output for s in ("ethernet", "leonardo", "yun")])
    assert "uno" in result.output

def test_ci_lib_and_board(clirunner, validate_cliresult):
    example_dir = join("examples", "atmelavr-and-arduino",
                       "arduino-external-libs")
def test_ci_lib_and_board(clirunner, tmpdir_factory, validate_cliresult):
    storage_dir = str(tmpdir_factory.mktemp("lib"))
    result = clirunner.invoke(
        cmd_lib, ["--storage-dir", storage_dir, "install", "1@2.3.2"])
    validate_cliresult(result)

    result = clirunner.invoke(cmd_ci, [
        join(example_dir, "lib", "OneWire", "examples", "DS2408_Switch",
             "DS2408_Switch.pde"), "-l", join(example_dir, "lib", "OneWire"),
        "-b", "uno"
        join(storage_dir, "OneWire_ID1", "examples", "DS2408_Switch",
             "DS2408_Switch.pde"), "-l",
        join(storage_dir, "OneWire_ID1"), "-b", "uno"
    ])
    validate_cliresult(result)

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -54,7 +54,7 @@ def test_init_duplicated_boards(clirunner, validate_cliresult, tmpdir):
    assert set(config.sections()) == set(["env:uno"])

def test_init_ide_without_board(clirunner, validate_cliresult, tmpdir):
def test_init_ide_without_board(clirunner, tmpdir):
    with tmpdir.as_cwd():
        result = clirunner.invoke(cmd_init, ["--ide", "atom"])
        assert result.exit_code == -1

@@ -67,13 +67,15 @@ def test_init_ide_atom(clirunner, validate_cliresult, tmpdir):
            cmd_init, ["--ide", "atom", "-b", "uno", "-b", "teensy31"])
        validate_cliresult(result)
        validate_pioproject(str(tmpdir))
        assert all([tmpdir.join(f).check()
                    for f in (".clang_complete", ".gcc-flags.json")])
        assert all([
            tmpdir.join(f).check()
            for f in (".clang_complete", ".gcc-flags.json")
        ])
        assert "arduinoavr" in tmpdir.join(".clang_complete").read()

        # switch to NodeMCU
        result = clirunner.invoke(
            cmd_init, ["--ide", "atom", "-b", "nodemcuv2", "-b", "uno"])
        result = clirunner.invoke(cmd_init,
                                  ["--ide", "atom", "-b", "nodemcuv2"])
        validate_cliresult(result)
        validate_pioproject(str(tmpdir))
        assert "arduinoespressif" in tmpdir.join(".clang_complete").read()

@@ -104,15 +106,13 @@ def test_init_special_board(clirunner, validate_cliresult):
        boards = json.loads(result.output)

        config = util.load_project_config()
        expected_result = [
            ("platform", str(boards[0]['platform'])),
            ("framework", str(boards[0]['frameworks'][0])), ("board", "uno")
        ]
        expected_result = [("platform", str(boards[0]['platform'])),
                           ("framework",
                            str(boards[0]['frameworks'][0])), ("board", "uno")]

        assert config.has_section("env:uno")
        assert len(
            set(expected_result).symmetric_difference(
                set(config.items("env:uno")))) == 0
        assert not set(expected_result).symmetric_difference(
            set(config.items("env:uno")))

def test_init_enable_auto_uploading(clirunner, validate_cliresult):

@@ -122,14 +122,11 @@ def test_init_enable_auto_uploading(clirunner, validate_cliresult):
        validate_cliresult(result)
        validate_pioproject(getcwd())
        config = util.load_project_config()
        expected_result = [
            ("platform", "atmelavr"), ("framework", "arduino"),
            ("board", "uno"), ("targets", "upload")
        ]
        expected_result = [("platform", "atmelavr"), ("framework", "arduino"),
                           ("board", "uno"), ("targets", "upload")]
        assert config.has_section("env:uno")
        assert len(
            set(expected_result).symmetric_difference(
                set(config.items("env:uno")))) == 0
        assert not set(expected_result).symmetric_difference(
            set(config.items("env:uno")))

def test_init_custom_framework(clirunner, validate_cliresult):

@@ -139,14 +136,11 @@ def test_init_custom_framework(clirunner, validate_cliresult):
        validate_cliresult(result)
        validate_pioproject(getcwd())
        config = util.load_project_config()
        expected_result = [
            ("platform", "teensy"), ("framework", "mbed"),
            ("board", "teensy31")
        ]
        expected_result = [("platform", "teensy"), ("framework", "mbed"),
                           ("board", "teensy31")]
        assert config.has_section("env:teensy31")
        assert len(
            set(expected_result).symmetric_difference(
                set(config.items("env:teensy31")))) == 0
        assert not set(expected_result).symmetric_difference(
            set(config.items("env:teensy31")))

def test_init_incorrect_board(clirunner):

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -14,10 +14,8 @@
import json
import re
from os.path import basename

from platformio import util
from platformio.commands.init import cli as cmd_init
from platformio import exception
from platformio.commands.lib import cli as cmd_lib

@@ -37,15 +35,21 @@ def test_search(clirunner, validate_cliresult):
def test_global_install_registry(clirunner, validate_cliresult,
                                 isolated_pio_home):
    result = clirunner.invoke(cmd_lib, [
        "-g", "install", "58", "OneWire",
        "http://dl.platformio.org/libraries/archives/3/5174.tar.gz",
        "ArduinoJson@5.6.7", "ArduinoJson@>5.6"
        "-g", "install", "64", "ArduinoJson@~5.10.0", "547@2.2.4",
        "AsyncMqttClient@<=0.8.2", "999@77d4eb3f8a"
    ])
    validate_cliresult(result)

    # install unknown library
    result = clirunner.invoke(cmd_lib, ["-g", "install", "Unknown"])
    assert result.exit_code != 0
    assert isinstance(result.exception, exception.LibNotFound)

    items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
    items2 = [
        "DHT22_ID58", "ArduinoJson_ID64", "ArduinoJson_ID64@5.6.7",
        "OneWire_ID1", "ESPAsyncTCP_ID305"
        "ArduinoJson_ID64", "ArduinoJson_ID64@5.10.1", "NeoPixelBus_ID547",
        "AsyncMqttClient_ID346", "ESPAsyncTCP_ID305", "AsyncTCP_ID1826",
        "RFcontrol_ID999"
    ]
    assert set(items1) == set(items2)

@@ -53,13 +57,27 @@ def test_global_install_registry(clirunner, validate_cliresult,
def test_global_install_archive(clirunner, validate_cliresult,
                                isolated_pio_home):
    result = clirunner.invoke(cmd_lib, [
        "-g", "install", "https://github.com/adafruit/Adafruit-ST7735-Library/"
        "archive/master.zip",
        "http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip"
        "-g", "install",
        "http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip",
        "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip",
        "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@5.8.2",
        "http://dl.platformio.org/libraries/archives/0/9540.tar.gz",
        "https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip"
    ])
    validate_cliresult(result)

    # incorrect requirements
    result = clirunner.invoke(cmd_lib, [
        "-g", "install",
        "https://github.com/bblanchon/ArduinoJson/archive/v5.8.2.zip@1.2.3"
    ])
    assert result.exit_code != 0

    items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
    items2 = ["Adafruit ST7735 Library", "RadioHead"]
    items2 = [
        "RadioHead-1.62", "ArduinoJson", "DallasTemperature_ID54",
        "OneWire_ID1", "ESP32WebServer"
    ]
    assert set(items1) >= set(items2)

@@ -71,99 +89,191 @@ def test_global_install_repository(clirunner, validate_cliresult,
        "-g",
        "install",
        "https://github.com/gioblu/PJON.git#3.0",
        "https://github.com/gioblu/PJON.git#6.2",
        "https://github.com/bblanchon/ArduinoJson.git",
        "https://gitlab.com/ivankravets/rs485-nodeproto.git",
        "https://github.com/platformio/platformio-libmirror.git",
        # "https://developer.mbed.org/users/simon/code/TextLCD/",
        "knolleary/pubsubclient"
        "knolleary/pubsubclient#bef58148582f956dfa772687db80c44e2279a163"
    ])
    validate_cliresult(result)
    items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
    items2 = ["PJON", "ESPAsyncTCP", "PubSubClient"]
    assert set(items2) & set(items1)
    items2 = [
        "PJON", "PJON@src-79de467ebe19de18287becff0a1fb42d",
        "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81", "rs485-nodeproto",
        "platformio-libmirror", "PubSubClient"
    ]
    assert set(items1) >= set(items2)

def test_global_lib_list(clirunner, validate_cliresult, isolated_pio_home):
def test_install_duplicates(clirunner, validate_cliresult, without_internet):
    # registry
    result = clirunner.invoke(cmd_lib, [
        "-g", "install",
        "http://dl.platformio.org/libraries/archives/0/9540.tar.gz"
    ])
    validate_cliresult(result)
    assert "is already installed" in result.output

    # by ID
    result = clirunner.invoke(cmd_lib, ["-g", "install", "999"])
    validate_cliresult(result)
    assert "is already installed" in result.output

    # archive
    result = clirunner.invoke(cmd_lib, [
        "-g", "install",
        "http://www.airspayce.com/mikem/arduino/RadioHead/RadioHead-1.62.zip"
    ])
    validate_cliresult(result)
    assert "is already installed" in result.output

    # repository
    result = clirunner.invoke(cmd_lib, [
        "-g", "install",
        "https://github.com/platformio/platformio-libmirror.git"
    ])
    validate_cliresult(result)
    assert "is already installed" in result.output

def test_global_lib_list(clirunner, validate_cliresult):
    result = clirunner.invoke(cmd_lib, ["-g", "list"])
    validate_cliresult(result)
    assert all([n in result.output for n in ("OneWire", "DHT22", "64")])
    assert all([
        n in result.output for n in
        ("Source: https://github.com/Pedroalbuquerque/ESP32WebServer/archive/master.zip",
         "Version: 5.10.1",
         "Source: git+https://github.com/gioblu/PJON.git#3.0",
         "Version: 1fb26fd", "RadioHead-1.62")
    ])

result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
|
||||
assert all([
|
||||
n in result.output
|
||||
for n in ("PJON", "git+https://github.com/knolleary/pubsubclient")
|
||||
n in result.output for n in
|
||||
("__pkg_dir",
|
||||
'"__src_url": "git+https://gitlab.com/ivankravets/rs485-nodeproto.git"',
|
||||
'"version": "5.10.1"')
|
||||
])
|
||||
items1 = [i['name'] for i in json.loads(result.output)]
|
||||
items2 = [
|
||||
"OneWire", "DHT22", "PJON", "ESPAsyncTCP", "ArduinoJson",
|
||||
"pubsubclient", "rs485-nodeproto", "Adafruit ST7735 Library",
|
||||
"RadioHead"
|
||||
"ESP32WebServer", "ArduinoJson", "ArduinoJson", "ArduinoJson",
|
||||
"ArduinoJson", "AsyncMqttClient", "AsyncTCP", "DallasTemperature",
|
||||
"ESPAsyncTCP", "NeoPixelBus", "OneWire", "PJON", "PJON",
|
||||
"PubSubClient", "RFcontrol", "RadioHead-1.62", "platformio-libmirror",
|
||||
"rs485-nodeproto"
|
||||
]
|
||||
assert set(items1) == set(items2)
|
||||
assert sorted(items1) == sorted(items2)
|
||||
|
||||
versions1 = [
|
||||
"{name}@{version}".format(**item) for item in json.loads(result.output)
|
||||
]
|
||||
versions2 = [
|
||||
'ArduinoJson@5.8.2', 'ArduinoJson@5.10.1', 'AsyncMqttClient@0.8.2',
|
||||
'AsyncTCP@1.0.1', 'NeoPixelBus@2.2.4', 'PJON@07fe9aa', 'PJON@1fb26fd',
|
||||
'PubSubClient@bef5814', 'RFcontrol@77d4eb3f8a', 'RadioHead-1.62@0.0.0'
|
||||
]
|
||||
assert set(versions1) >= set(versions2)
|
||||
|
||||
|
||||
def test_global_lib_show(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cmd_lib, ["-g", "show", "64@5.6.7"])
def test_global_lib_update_check(clirunner, validate_cliresult):
result = clirunner.invoke(
cmd_lib, ["-g", "update", "--only-check", "--json-output"])
validate_cliresult(result)
assert all([
s in result.output for s in ("Json", "arduino", "atmelavr", "5.6.7")
])
output = json.loads(result.output)
assert set(["RFcontrol",
"NeoPixelBus"]) == set([l['name'] for l in output])

result = clirunner.invoke(cmd_lib, ["-g", "show", "ArduinoJson@>5.6.7"])

def test_global_lib_update(clirunner, validate_cliresult):
# update library using package directory
result = clirunner.invoke(
cmd_lib,
["-g", "update", "NeoPixelBus", "--only-check", "--json-output"])
validate_cliresult(result)
assert all(
[s in result.output for s in ("ArduinoJson", "arduino", "atmelavr")])
assert "5.6.7" not in result.output

result = clirunner.invoke(cmd_lib, ["-g", "show", "1"])
outdated = json.loads(result.output)
assert len(outdated) == 1
assert "__pkg_dir" in outdated[0]
result = clirunner.invoke(cmd_lib,
["-g", "update", outdated[0]['__pkg_dir']])
validate_cliresult(result)
assert "OneWire" in result.output
assert "Uninstalling NeoPixelBus @ 2.2.4" in result.output

def test_global_lib_update(clirunner, validate_cliresult, isolated_pio_home):
# update rest libraries
result = clirunner.invoke(cmd_lib, ["-g", "update"])
validate_cliresult(result)
assert all([s in result.output for s in ("[Up-to-date]", "[VCS]")])
assert result.output.count("[Fixed]") == 6
assert result.output.count("[Up-to-date]") == 11
assert "Uninstalling RFcontrol @ 77d4eb3f8a" in result.output

# update unknown library
result = clirunner.invoke(cmd_lib, ["-g", "update", "Unknown"])
assert result.exit_code != 0
assert isinstance(result.exception, exception.UnknownPackage)


def test_global_lib_uninstall(clirunner, validate_cliresult,
isolated_pio_home):
# uninstall using package directory
result = clirunner.invoke(cmd_lib, ["-g", "list", "--json-output"])
validate_cliresult(result)
items = json.loads(result.output)
result = clirunner.invoke(cmd_lib,
["-g", "uninstall", items[5]['__pkg_dir']])
validate_cliresult(result)
assert "Uninstalling AsyncTCP" in result.output

# uninstall the rest libraries
result = clirunner.invoke(cmd_lib, [
"-g", "uninstall", "1", "ArduinoJson@!=5.6.7", "TextLCD",
"Adafruit ST7735 Library"
"-g", "uninstall", "1", "https://github.com/bblanchon/ArduinoJson.git",
"ArduinoJson@!=5.6.7", "RFcontrol"
])
validate_cliresult(result)

items1 = [d.basename for d in isolated_pio_home.join("lib").listdir()]
items2 = [
"DHT22_ID58", "ArduinoJson_ID64@5.6.7", "ESPAsyncTCP_ID305",
"pubsubclient", "PJON", "rs485-nodeproto", "RadioHead_ID124"
"RadioHead-1.62", "rs485-nodeproto", "platformio-libmirror",
"PubSubClient", "ArduinoJson@src-69ebddd821f771debe7ee734d3c7fa81",
"ESPAsyncTCP_ID305", "DallasTemperature_ID54", "NeoPixelBus_ID547",
"PJON", "AsyncMqttClient_ID346", "ArduinoJson_ID64",
"PJON@src-79de467ebe19de18287becff0a1fb42d", "ESP32WebServer"
]
assert set(items1) == set(items2)

# uninstall unknown library
result = clirunner.invoke(cmd_lib, ["-g", "uninstall", "Unknown"])
assert result.exit_code != 0
assert isinstance(result.exception, exception.UnknownPackage)

def test_project_lib_complex(clirunner, validate_cliresult, tmpdir):
with tmpdir.as_cwd():
# init
result = clirunner.invoke(cmd_init)
validate_cliresult(result)

# install
result = clirunner.invoke(cmd_lib, ["install", "54", "ArduinoJson"])
validate_cliresult(result)
items1 = [
d.basename
for d in tmpdir.join(basename(util.get_projectlibdeps_dir()))
.listdir()
]
items2 = ["DallasTemperature_ID54", "OneWire_ID1", "ArduinoJson_ID64"]
assert set(items1) == set(items2)
def test_lib_show(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_lib, ["show", "64"])
validate_cliresult(result)
assert all(
[s in result.output for s in ("ArduinoJson", "Arduino", "Atmel AVR")])
result = clirunner.invoke(cmd_lib, ["show", "OneWire", "--json-output"])
validate_cliresult(result)
assert "OneWire" in result.output

# list
result = clirunner.invoke(cmd_lib, ["list", "--json-output"])
validate_cliresult(result)
items1 = [i['name'] for i in json.loads(result.output)]
items2 = ["DallasTemperature", "OneWire", "ArduinoJson"]
assert set(items1) == set(items2)

# update
result = clirunner.invoke(cmd_lib, ["update"])
validate_cliresult(result)
assert "[Up-to-date]" in result.output
def test_lib_builtin(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_lib, ["builtin"])
validate_cliresult(result)
result = clirunner.invoke(cmd_lib, ["builtin", "--json-output"])
validate_cliresult(result)


def test_lib_stats(clirunner, validate_cliresult):
result = clirunner.invoke(cmd_lib, ["stats"])
validate_cliresult(result)
assert all([
s in result.output
for s in ("UPDATED", "POPULAR", "https://platformio.org/lib/show")
])

result = clirunner.invoke(cmd_lib, ["stats", "--json-output"])
validate_cliresult(result)
assert set([
"dlweek", "added", "updated", "topkeywords", "dlmonth", "dlday",
"lastkeywords"
]) == set(json.loads(result.output).keys())

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,36 +13,18 @@
# limitations under the License.

import json
import os
from os.path import join

from platformio import exception, util
from platformio import exception
from platformio.commands import platform as cli_platform


def test_list_json_output(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_list, ["--json-output"])
validate_cliresult(result)
list_result = json.loads(result.output)
assert isinstance(list_result, list)
assert len(list_result)
platforms = [item['name'] for item in list_result]
assert "titiva" in platforms


def test_list_raw_output(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_list)
validate_cliresult(result)
assert "teensy" in result.output


def test_search_json_output(clirunner, validate_cliresult):
def test_search_json_output(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_search,
["arduino", "--json-output"])
validate_cliresult(result)
search_result = json.loads(result.output)
assert isinstance(search_result, list)
assert len(search_result)
assert search_result
platforms = [item['name'] for item in search_result]
assert "atmelsam" in platforms

@@ -53,67 +35,79 @@ def test_search_raw_output(clirunner, validate_cliresult):
assert "teensy" in result.output


def test_install_uknown_from_registry(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_install,
["uknown-platform"])
assert result.exit_code == -1
assert isinstance(result.exception, exception.UnknownPackage)


def test_install_uknown_version(clirunner, validate_cliresult):
def test_install_unknown_version(clirunner):
result = clirunner.invoke(cli_platform.platform_install,
["atmelavr@99.99.99"])
assert result.exit_code == -1
assert isinstance(result.exception, exception.UndefinedPackageVersion)


def test_complex(clirunner, validate_cliresult):
with clirunner.isolated_filesystem():
os.environ["PLATFORMIO_HOME_DIR"] = os.getcwd()
try:
result = clirunner.invoke(
cli_platform.platform_install,
["teensy", "--with-package", "framework-arduinoteensy"])
validate_cliresult(result)
assert all([
s in result.output
for s in ("teensy", "Downloading", "Unpacking")
])
def test_install_unknown_from_registry(clirunner):
result = clirunner.invoke(cli_platform.platform_install,
["unknown-platform"])
assert result.exit_code == -1
assert isinstance(result.exception, exception.UnknownPackage)

# show platform information
result = clirunner.invoke(cli_platform.platform_show, ["teensy"])
validate_cliresult(result)
assert "teensy" in result.output

# list platforms
result = clirunner.invoke(cli_platform.platform_list,
["--json-output"])
validate_cliresult(result)
list_result = json.loads(result.output)
assert isinstance(list_result, list)
assert len(list_result) == 1
assert list_result[0]["name"] == "teensy"
assert list_result[0]["packages"] == ["framework-arduinoteensy"]
def test_install_known_version(clirunner, validate_cliresult,
isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_install, [
"atmelavr@1.2.0", "--skip-default-package", "--with-package",
"tool-avrdude"
])
validate_cliresult(result)
assert "atmelavr @ 1.2.0" in result.output
assert "Installing tool-avrdude @" in result.output
assert len(isolated_pio_home.join("packages").listdir()) == 1

# try to install again
result = clirunner.invoke(cli_platform.platform_install,
["teensy"])
validate_cliresult(result)
assert "is already installed" in result.output

# try to update
for _ in range(2):
result = clirunner.invoke(cli_platform.platform_update)
validate_cliresult(result)
assert "teensy" in result.output
assert "Up-to-date" in result.output
assert "Out-of-date" not in result.output
def test_install_from_vcs(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_install, [
"https://github.com/platformio/"
"platform-espressif8266.git#feature/stage", "--skip-default-package"
])
validate_cliresult(result)
assert "espressif8266" in result.output
assert len(isolated_pio_home.join("packages").listdir()) == 1

# try to uninstall
result = clirunner.invoke(cli_platform.platform_uninstall,
["teensy"])
validate_cliresult(result)
for folder in ("platforms", "packages"):
assert len(os.listdir(join(util.get_home_dir(), folder))) == 0
finally:
del os.environ["PLATFORMIO_HOME_DIR"]

def test_list_json_output(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_list, ["--json-output"])
validate_cliresult(result)
list_result = json.loads(result.output)
assert isinstance(list_result, list)
assert list_result
platforms = [item['name'] for item in list_result]
assert set(["atmelavr", "espressif8266"]) == set(platforms)


def test_list_raw_output(clirunner, validate_cliresult):
result = clirunner.invoke(cli_platform.platform_list)
validate_cliresult(result)
assert all(
[s in result.output for s in ("atmelavr", "espressif8266")])


def test_update_check(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_update,
["--only-check", "--json-output"])
validate_cliresult(result)
output = json.loads(result.output)
assert len(output) == 1
assert output[0]['name'] == "atmelavr"
assert len(isolated_pio_home.join("packages").listdir()) == 1


def test_update_raw(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_update)
validate_cliresult(result)
assert "Uninstalling atmelavr @ 1.2.0:" in result.output
assert "PlatformManager: Installing atmelavr @" in result.output
assert len(isolated_pio_home.join("packages").listdir()) == 1


def test_uninstall(clirunner, validate_cliresult, isolated_pio_home):
result = clirunner.invoke(cli_platform.platform_uninstall,
["atmelavr", "espressif8266"])
validate_cliresult(result)
assert not isolated_pio_home.join("platforms").listdir()

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,6 +19,6 @@ from platformio.commands.settings import cli
def test_settings_check(clirunner, validate_cliresult):
result = clirunner.invoke(cli, ["get"])
validate_cliresult(result)
assert len(result.output)
assert result.output
for item in app.DEFAULT_SETTINGS.items():
assert item[0] in result.output

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,10 +20,11 @@ from platformio import util


def test_local_env():
result = util.exec_command(["platformio", "test", "-d",
join("examples", "unit-testing", "calculator"),
"-e", "native"])
result = util.exec_command([
"platformio", "test", "-d",
join("examples", "unit-testing", "calculator"), "-e", "native"
])
if result['returncode'] != 1:
pytest.fail(result)
assert all(
[s in result['out'] for s in ("PASSED", "IGNORED", "FAILED")])
assert all([s in result['out']
for s in ("PASSED", "IGNORED", "FAILED")]), result['out']

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,11 +16,7 @@ from platformio.commands.update import cli as cmd_update


def test_update(clirunner, validate_cliresult):
matches = (
"Platform Manager",
"Up-to-date",
"Library Manager"
)
matches = ("Platform Manager", "Up-to-date", "Library Manager")
result = clirunner.invoke(cmd_update, ["--only-check"])
validate_cliresult(result)
assert all([m in result.output for m in matches])

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,27 +15,26 @@
import os

import pytest
import requests
from click.testing import CliRunner

requests.packages.urllib3.disable_warnings()


@pytest.fixture(scope="module")
def clirunner():
return CliRunner()
from platformio import util


@pytest.fixture(scope="session")
def validate_cliresult():

def decorator(result):
assert result.exit_code == 0
assert not result.exception
assert result.exit_code == 0, result.output
assert not result.exception, result.output

return decorator


@pytest.fixture(scope="module")
def clirunner():
return CliRunner()


@pytest.fixture(scope="module")
def isolated_pio_home(request, tmpdir_factory):
home_dir = tmpdir_factory.mktemp(".platformio")
@@ -46,3 +45,8 @@ def isolated_pio_home(request, tmpdir_factory):

request.addfinalizer(fin)
return home_dir


@pytest.fixture(scope="function")
def without_internet(monkeypatch):
monkeypatch.setattr(util, "_internet_on", lambda: False)

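For orientation, a minimal sketch of how these fixtures are consumed; it simply mirrors test_api_internet_offline from tests/test_misc.py further below, so the fixture names, exception class and util call are all taken from this same compare view:

    # Minimal usage sketch of the conftest fixtures above (mirrors tests/test_misc.py).
    import pytest
    from platformio import exception, util

    def test_api_offline_example(without_internet, isolated_pio_home):
        # `without_internet` patches util._internet_on to return False,
        # so an API call is expected to raise InternetIsOffline.
        with pytest.raises(exception.InternetIsOffline):
            util.get_api_result("/stats")
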
@@ -26,12 +26,15 @@ Foo foo(&fooCallback);

//

template<class T> T Add(T n1, T n2) {
return n1 + n2;
}

void setup() {
struct Item item1;
myFunction(&item1);
}


void loop() {

}
@@ -40,8 +43,10 @@ void myFunction(struct Item *item) {

}

#warning "Line number is 43"
#warning "Line number is 46"

void fooCallback(){

}

// юнікод

97  tests/test_builder.py  (new file)
@@ -0,0 +1,97 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.commands.run import cli as cmd_run


def test_build_flags(clirunner, validate_cliresult, tmpdir):
build_flags = [("-D TEST_INT=13", "-DTEST_INT=13"),
("-DTEST_SINGLE_MACRO", "-DTEST_SINGLE_MACRO"),
('-DTEST_STR_SPACE="Andrew Smith"',
'"-DTEST_STR_SPACE=Andrew Smith"')]

tmpdir.join("platformio.ini").write("""
[env:native]
platform = native
extra_scripts = extra.py
build_flags = %s
""" % " ".join([f[0] for f in build_flags]))

tmpdir.join("extra.py").write("""
Import("projenv")

projenv.Append(CPPDEFINES="POST_SCRIPT_MACRO")
""")

tmpdir.mkdir("src").join("main.cpp").write("""
#if !defined(TEST_INT) || TEST_INT != 13
#error "TEST_INT"
#endif

#ifndef TEST_STR_SPACE
#error "TEST_STR_SPACE"
#endif

#ifndef POST_SCRIPT_MACRO
#error "POST_SCRIPT_MACRO"
#endif

int main() {
}
""")

result = clirunner.invoke(
cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
validate_cliresult(result)
build_output = result.output[result.output.find(
"Scanning dependencies..."):]
for flag in build_flags:
assert flag[1] in build_output, flag

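For reference, after the `%` substitution above the generated platformio.ini for this test works out to the following (derived from the first element of each build_flags pair; shown only to make the expected quoting of the space-containing macro visible):

    [env:native]
    platform = native
    extra_scripts = extra.py
    build_flags = -D TEST_INT=13 -DTEST_SINGLE_MACRO -DTEST_STR_SPACE="Andrew Smith"
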
def test_build_unflags(clirunner, validate_cliresult, tmpdir):
tmpdir.join("platformio.ini").write("""
[env:native]
platform = native
build_unflags = -DTMP_MACRO1=45 -I. -DNON_EXISTING_MACRO -lunknownLib -Os
extra_scripts = pre:extra.py
""")

tmpdir.join("extra.py").write("""
Import("env")
env.Append(CPPPATH="%s")
env.Append(CPPDEFINES="TMP_MACRO1")
env.Append(CPPDEFINES=["TMP_MACRO2"])
env.Append(CPPDEFINES=("TMP_MACRO3", 13))
env.Append(CCFLAGS=["-Os"])
env.Append(LIBS=["unknownLib"])
""" % str(tmpdir))

tmpdir.mkdir("src").join("main.c").write("""
#ifdef TMP_MACRO1
#error "TMP_MACRO1 should be removed"
#endif

int main() {
}
""")

result = clirunner.invoke(
cmd_run, ["--project-dir", str(tmpdir), "--verbose"])
validate_cliresult(result)
build_output = result.output[result.output.find(
"Scanning dependencies..."):]
assert "-DTMP_MACRO1" not in build_output
assert "-Os" not in build_output
assert str(tmpdir) not in build_output
@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import random
from glob import glob
from os import listdir, walk
from os.path import dirname, getsize, isdir, isfile, join, normpath
@@ -19,43 +20,71 @@ from os.path import dirname, getsize, isdir, isfile, join, normpath

import pytest

from platformio import util
from platformio.managers.platform import PlatformFactory, PlatformManager


def pytest_generate_tests(metafunc):
if "pioproject_dir" not in metafunc.fixturenames:
return
example_dirs = normpath(join(dirname(__file__), "..", "examples"))
project_dirs = []
for root, _, files in walk(example_dirs):
if "platformio.ini" not in files or ".skiptest" in files:
examples_dirs = []

# repo examples
examples_dirs.append(normpath(join(dirname(__file__), "..", "examples")))

# dev/platforms
for manifest in PlatformManager().get_installed():
p = PlatformFactory.newPlatform(manifest['__pkg_dir'])
if not p.is_embedded():
continue
project_dirs.append(root)
examples_dir = join(p.get_dir(), "examples")
assert isdir(examples_dir)
examples_dirs.append(examples_dir)

project_dirs = []
for examples_dir in examples_dirs:
platform_examples = []
for root, _, files in walk(examples_dir):
if "platformio.ini" not in files or ".skiptest" in files:
continue
platform_examples.append(root)

# test random 3 examples
random.shuffle(platform_examples)
project_dirs.extend(platform_examples[:3])
project_dirs.sort()
metafunc.parametrize("pioproject_dir", project_dirs)

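In case the collection hook above is unfamiliar: pytest calls pytest_generate_tests during collection, and metafunc.parametrize turns each collected example directory into its own test_run invocation. A stripped-down sketch of the same mechanism (the directory names here are made up purely for illustration):

    # conftest-style sketch of metafunc.parametrize; the directory list is hypothetical.
    def pytest_generate_tests(metafunc):
        if "pioproject_dir" in metafunc.fixturenames:
            # each entry becomes its own parametrized test case, e.g. test_run[examples/blink]
            metafunc.parametrize("pioproject_dir", ["examples/blink", "examples/wifi"])
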
@pytest.mark.examples
def test_run(pioproject_dir):
if isdir(join(pioproject_dir, ".pioenvs")):
util.rmtree_(join(pioproject_dir, ".pioenvs"))
with util.cd(pioproject_dir):
build_dir = util.get_projectbuild_dir()
if isdir(build_dir):
util.rmtree_(build_dir)

result = util.exec_command(
["platformio", "--force", "run", "--project-dir", pioproject_dir]
)
if result['returncode'] != 0:
pytest.fail(result)
env_names = []
for section in util.load_project_config().sections():
if section.startswith("env:"):
env_names.append(section[4:])

# check .elf file
pioenvs_dir = join(pioproject_dir, ".pioenvs")
for item in listdir(pioenvs_dir):
if not isdir(item):
continue
assert isfile(join(pioenvs_dir, item, "firmware.elf"))
# check .hex or .bin files
firmwares = []
for ext in ("bin", "hex"):
firmwares += glob(join(pioenvs_dir, item, "firmware*.%s" % ext))
if not firmwares:
pytest.fail("Missed firmware file")
for firmware in firmwares:
assert getsize(firmware) > 0
result = util.exec_command(
["platformio", "run", "-e",
random.choice(env_names)])
if result['returncode'] != 0:
pytest.fail(result)

assert isdir(build_dir)

# check .elf file
for item in listdir(build_dir):
if not isdir(item):
continue
assert isfile(join(build_dir, item, "firmware.elf"))
# check .hex or .bin files
firmwares = []
for ext in ("bin", "hex"):
firmwares += glob(join(build_dir, item, "firmware*.%s" % ext))
if not firmwares:
pytest.fail("Missed firmware file")
for firmware in firmwares:
assert getsize(firmware) > 0

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -42,7 +42,7 @@ def test_warning_line(clirunner, validate_cliresult):
validate_cliresult(result)
assert ('basic.ino:16:14: warning: #warning "Line number is 16"' in
result.output)
assert ('basic.ino:43:2: warning: #warning "Line number is 43"' in
assert ('basic.ino:46:2: warning: #warning "Line number is 46"' in
result.output)
result = clirunner.invoke(
cmd_ci, [join(INOTEST_DIR, "strmultilines"), "-b", "uno"])

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,7 @@
# limitations under the License.

import json
import re
from time import time

from platformio import app, maintenance
@@ -43,9 +44,7 @@ def test_after_upgrade_2_to_3(clirunner, validate_cliresult,

result = clirunner.invoke(cli_pio, ["settings", "get"])
validate_cliresult(result)
assert "upgraded to 3"
assert isolated_pio_home.join("platforms", "native",
"platform.json").check()
assert "upgraded to 3" in result.output

# check PlatformIO 3.0 boards
assert board_ids == set([p.basename[:-5] for p in boards.listdir()])
@@ -56,8 +55,7 @@ def test_after_upgrade_2_to_3(clirunner, validate_cliresult,
assert board_ids == set([b['id'] for b in json.loads(result.output)])


def test_after_upgrade_silence(clirunner, validate_cliresult,
isolated_pio_home):
def test_after_upgrade_silence(clirunner, validate_cliresult):
app.set_state_item("last_version", "2.11.2")
result = clirunner.invoke(cli_pio, ["boards", "--json-output"])
validate_cliresult(result)
@@ -65,7 +63,7 @@ def test_after_upgrade_silence(clirunner, validate_cliresult,
assert any([b['id'] == "uno" for b in boards])


def test_check_pio_upgrade(clirunner, validate_cliresult, isolated_pio_home):
def test_check_pio_upgrade(clirunner, validate_cliresult):

def _patch_pio_version(version):
maintenance.__version__ = version
@@ -95,7 +93,7 @@ def test_check_pio_upgrade(clirunner, validate_cliresult, isolated_pio_home):
_patch_pio_version(origin_version)


def test_check_lib_updates(clirunner, validate_cliresult, isolated_pio_home):
def test_check_lib_updates(clirunner, validate_cliresult):
# install obsolete library
result = clirunner.invoke(cli_pio,
["lib", "-g", "install", "ArduinoJson@<5.7"])
@@ -112,8 +110,7 @@ def test_check_lib_updates(clirunner, validate_cliresult, isolated_pio_home):
result.output)


def test_check_and_update_libraries(clirunner, validate_cliresult,
isolated_pio_home):
def test_check_and_update_libraries(clirunner, validate_cliresult):
# enable library auto-updates
result = clirunner.invoke(
cli_pio, ["settings", "set", "auto_update_libraries", "Yes"])
@@ -135,7 +132,8 @@ def test_check_and_update_libraries(clirunner, validate_cliresult,
assert ("There are the new updates for libraries (ArduinoJson)" in
result.output)
assert "Please wait while updating libraries" in result.output
assert "[Out-of-date]" in result.output
assert re.search(r"Updating ArduinoJson\s+@ 5.6.7\s+\[[\d\.]+\]",
result.output)

# check updated version
result = clirunner.invoke(cli_pio, ["lib", "-g", "list", "--json-output"])
@@ -154,7 +152,7 @@ def test_check_platform_updates(clirunner, validate_cliresult,
manifest['version'] = "0.0.0"
manifest_path.write(json.dumps(manifest))
# reset cached manifests
PlatformManager().reset_cache()
PlatformManager().cache_reset()

# reset check time
interval = int(app.get_setting("check_platforms_interval")) * 3600 * 24
@@ -166,8 +164,7 @@ def test_check_platform_updates(clirunner, validate_cliresult,
assert "There are the new updates for platforms (native)" in result.output


def test_check_and_update_platforms(clirunner, validate_cliresult,
isolated_pio_home):
def test_check_and_update_platforms(clirunner, validate_cliresult):
# enable library auto-updates
result = clirunner.invoke(
cli_pio, ["settings", "set", "auto_update_platforms", "Yes"])
@@ -188,7 +185,7 @@ def test_check_and_update_platforms(clirunner, validate_cliresult,
validate_cliresult(result)
assert "There are the new updates for platforms (native)" in result.output
assert "Please wait while updating platforms" in result.output
assert "[Out-of-date]" in result.output
assert re.search(r"Updating native\s+@ 0.0.0\s+\[[\d\.]+\]", result.output)

# check updated version
result = clirunner.invoke(cli_pio, ["platform", "list", "--json-output"])

@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,77 +12,210 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import json
from os.path import join

from platformio import util
from platformio.managers.package import BasePkgManager
from platformio.managers.package import PackageManager


def test_pkg_name_parser():
def test_pkg_input_parser():
items = [
["PkgName", ("PkgName", None, None)],
[("PkgName", "!=1.2.3,<2.0"), ("PkgName", "!=1.2.3,<2.0", None)],
["PkgName@1.2.3", ("PkgName", "1.2.3", None)],
[("PkgName@1.2.3", "1.2.5"), ("PkgName@1.2.3", "1.2.5", None)],
["id:13", ("id:13", None, None)],
["id:13@~1.2.3", ("id:13", "~1.2.3", None)], [
["id=13", ("id=13", None, None)],
["id=13@~1.2.3", ("id=13", "~1.2.3", None)],
[
util.get_home_dir(),
(".platformio", None, "file://" + util.get_home_dir())
], [
],
[
"LocalName=" + util.get_home_dir(),
("LocalName", None, "file://" + util.get_home_dir())
], [
],
[
"LocalName=%s@>2.3.0" % util.get_home_dir(),
("LocalName", ">2.3.0", "file://" + util.get_home_dir())
],
[
"https://github.com/user/package.git",
("package", None, "git+https://github.com/user/package.git")
], [
"https://gitlab.com/user/package.git",
("package", None, "git+https://gitlab.com/user/package.git")
], [
],
[
"MyPackage=https://gitlab.com/user/package.git",
("MyPackage", None, "git+https://gitlab.com/user/package.git")
],
[
"MyPackage=https://gitlab.com/user/package.git@3.2.1,!=2",
("MyPackage", "3.2.1,!=2",
"git+https://gitlab.com/user/package.git")
],
[
"https://somedomain.com/path/LibraryName-1.2.3.zip",
("LibraryName-1.2.3", None,
"https://somedomain.com/path/LibraryName-1.2.3.zip")
],
[
"https://github.com/user/package/archive/branch.zip",
("branch", None,
"https://github.com/user/package/archive/branch.zip")
], [
],
[
"https://github.com/user/package/archive/branch.zip@~1.2.3",
("branch", "~1.2.3",
"https://github.com/user/package/archive/branch.zip")
],
[
"https://github.com/user/package/archive/branch.tar.gz",
("branch", None,
("branch.tar", None,
"https://github.com/user/package/archive/branch.tar.gz")
], [
],
[
"https://github.com/user/package/archive/branch.tar.gz@!=5",
("branch.tar", "!=5",
"https://github.com/user/package/archive/branch.tar.gz")
],
[
"https://developer.mbed.org/users/user/code/package/",
("package", None,
"hg+https://developer.mbed.org/users/user/code/package/")
], [
],
[
"https://os.mbed.com/users/user/code/package/",
("package", None,
"hg+https://os.mbed.com/users/user/code/package/")
],
[
"https://github.com/user/package#v1.2.3",
("package", None, "git+https://github.com/user/package#v1.2.3")
], [
],
[
"https://github.com/user/package.git#branch",
("package", None, "git+https://github.com/user/package.git#branch")
], [
],
[
"PkgName=https://github.com/user/package.git#a13d344fg56",
("PkgName", None,
"git+https://github.com/user/package.git#a13d344fg56")
], [
],
[
"user/package",
("package", None, "git+https://github.com/user/package")
],
[
"PkgName=user/package",
("PkgName", None, "git+https://github.com/user/package")
], [
],
[
"PkgName=user/package#master",
("PkgName", None, "git+https://github.com/user/package#master")
], [
],
[
"git+https://github.com/user/package",
("package", None, "git+https://github.com/user/package")
], [
],
[
"hg+https://example.com/user/package",
("package", None, "hg+https://example.com/user/package")
], [
],
[
"git@github.com:user/package.git",
("package", None, "git@github.com:user/package.git")
], [
("package", None, "git+git@github.com:user/package.git")
],
[
"git@github.com:user/package.git#v1.2.0",
("package", None, "git@github.com:user/package.git#v1.2.0")
], [
("package", None, "git+git@github.com:user/package.git#v1.2.0")
],
[
"LocalName=git@github.com:user/package.git#v1.2.0@~1.2.0",
("LocalName", "~1.2.0", "git+git@github.com:user/package.git#v1.2.0")
],
[
"git+ssh://git@gitlab.private-server.com/user/package#1.2.0",
("package", None,
"git+ssh://git@gitlab.private-server.com/user/package#1.2.0")
],
[
"git+ssh://user@gitlab.private-server.com:1234/package#1.2.0",
("package", None,
"git+ssh://user@gitlab.private-server.com:1234/package#1.2.0")
],
[
"LocalName=git+ssh://user@gitlab.private-server.com:1234"
"/package#1.2.0@!=13",
("LocalName", "!=13",
"git+ssh://user@gitlab.private-server.com:1234/package#1.2.0")
]
]
for params, result in items:
if isinstance(params, tuple):
assert BasePkgManager.parse_pkg_name(*params) == result
assert PackageManager.parse_pkg_uri(*params) == result
else:
assert BasePkgManager.parse_pkg_name(params) == result
assert PackageManager.parse_pkg_uri(params) == result

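To make the table above concrete, one worked example of the new-style call; the input string, the call form and the expected tuple are taken verbatim from the items list in this test:

    # Example drawn from the items list above.
    name, requirements, url = PackageManager.parse_pkg_uri("PkgName=user/package#master")
    assert (name, requirements, url) == (
        "PkgName", None, "git+https://github.com/user/package#master")
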
def test_install_packages(isolated_pio_home, tmpdir):
packages = [
dict(id=1, name="name_1", version="shasum"),
dict(id=1, name="name_1", version="2.0.0"),
dict(id=1, name="name_1", version="2.1.0"),
dict(id=1, name="name_1", version="1.2"),
dict(id=1, name="name_1", version="1.0.0"),
dict(name="name_2", version="1.0.0"),
dict(name="name_2", version="2.0.0",
__src_url="git+https://github.com"),
dict(name="name_2", version="3.0.0",
__src_url="git+https://github2.com"),
dict(name="name_2", version="4.0.0",
__src_url="git+https://github2.com")
]

pm = PackageManager(join(util.get_home_dir(), "packages"))
for package in packages:
tmp_dir = tmpdir.mkdir("tmp-package")
tmp_dir.join("package.json").write(json.dumps(package))
pm._install_from_url(package['name'], "file://%s" % str(tmp_dir))
tmp_dir.remove(rec=1)

assert len(pm.get_installed()) == len(packages) - 1

pkg_dirnames = [
'name_1_ID1', 'name_1_ID1@1.0.0', 'name_1_ID1@1.2',
'name_1_ID1@2.0.0', 'name_1_ID1@shasum', 'name_2',
'name_2@src-177cbce1f0705580d17790fda1cc2ef5',
'name_2@src-f863b537ab00f4c7b5011fc44b120e1f'
]
assert set([p.basename for p in isolated_pio_home.join(
"packages").listdir()]) == set(pkg_dirnames)


def test_get_package():
tests = [
[("unknown", ), None],
[("1", ), None],
[("id=1", "shasum"), dict(id=1, name="name_1", version="shasum")],
[("id=1", "*"), dict(id=1, name="name_1", version="2.1.0")],
[("id=1", "^1"), dict(id=1, name="name_1", version="1.2")],
[("id=1", "^1"), dict(id=1, name="name_1", version="1.2")],
[("name_1", "<2"), dict(id=1, name="name_1", version="1.2")],
[("name_1", ">2"), None],
[("name_1", "2-0-0"), None],
[("name_2", ), dict(name="name_2", version="4.0.0")],
[("url_has_higher_priority", None, "git+https://github.com"),
dict(name="name_2", version="2.0.0",
__src_url="git+https://github.com")],
[("name_2", None, "git+https://github.com"),
dict(name="name_2", version="2.0.0",
__src_url="git+https://github.com")],
]

pm = PackageManager(join(util.get_home_dir(), "packages"))
for test in tests:
manifest = pm.get_package(*test[0])
if test[1] is None:
assert manifest is None, test
continue
for key, value in test[1].items():
assert manifest[key] == value, test

36  tests/test_misc.py  (new file)
@@ -0,0 +1,36 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import requests

from platformio import exception, util


def test_ping_internet_ips():
for ip in util.PING_INTERNET_IPS:
requests.get("http://%s" % ip, allow_redirects=False, timeout=2)


def test_api_internet_offline(without_internet, isolated_pio_home):
with pytest.raises(exception.InternetIsOffline):
util.get_api_result("/stats")


def test_api_cache(monkeypatch, isolated_pio_home):
api_kwargs = {"url": "/stats", "cache_valid": "10s"}
result = util.get_api_result(**api_kwargs)
assert result and "boards" in result
monkeypatch.setattr(util, '_internet_on', lambda: False)
assert util.get_api_result(**api_kwargs) == result
@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,33 +16,29 @@ import pytest
import requests


def pytest_generate_tests(metafunc):
if "package_data" not in metafunc.fixturenames:
return
def validate_response(r):
assert r.status_code == 200, r.url
assert int(r.headers['Content-Length']) > 0, r.url
assert r.headers['Content-Type'] in ("application/gzip",
"application/octet-stream")


def test_packages():
pkgs_manifest = requests.get(
"https://dl.bintray.com/platformio/dl-packages/manifest.json").json()
assert isinstance(pkgs_manifest, dict)
packages = []
items = []
for _, variants in pkgs_manifest.iteritems():
for item in variants:
packages.append(item)
metafunc.parametrize("package_data", packages)
items.append(item)

for item in items:
assert item['url'].endswith(".tar.gz"), item

def validate_response(req):
assert req.status_code == 200
assert int(req.headers['Content-Length']) > 0
assert req.headers['Content-Type'] in ("application/gzip",
"application/octet-stream")
r = requests.head(item['url'], allow_redirects=True)
validate_response(r)

if "X-Checksum-Sha1" not in r.headers:
return pytest.skip("X-Checksum-Sha1 is not provided")

def test_package(package_data):
assert package_data['url'].endswith(".tar.gz")

r = requests.head(package_data['url'], allow_redirects=True)
validate_response(r)

if "X-Checksum-Sha1" not in r.headers:
return pytest.skip("X-Checksum-Sha1 is not provided")

assert package_data['sha1'] == r.headers.get("X-Checksum-Sha1")
assert item['sha1'] == r.headers.get("X-Checksum-Sha1")[0:40], item

17  tox.ini
@@ -1,4 +1,4 @@
# Copyright 2014-present PlatformIO <contact@platformio.org>
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,10 +20,10 @@ basepython = python2.7
usedevelop = True
deps =
isort
flake8
yapf
pylint
pytest
pytest-xdist
commands = python --version

[testenv:docs]
@@ -31,6 +31,7 @@ basepython = python2.7
deps =
sphinx
sphinx_rtd_theme
restructuredtext-lint
commands =
sphinx-build -W -b html -d {envtmpdir}/doctrees docs docs/_build/html
sphinx-build -W -b latex -d {envtmpdir}/doctrees docs docs/_build/latex
@@ -46,10 +47,8 @@ commands =
[testenv:lint]
basepython = python2.7
deps =
flake8
pylint
commands =
flake8 ./platformio
pylint --rcfile=./.pylintrc ./platformio

[testenv]
@@ -59,8 +58,18 @@ deps =
pytest
commands =
{envpython} --version
{envpython} -c "print 'travis_fold:start:install_devplatforms'"
{envpython} scripts/install_devplatforms.py
{envpython} -c "print 'travis_fold:end:install_devplatforms'"
py.test -v --basetemp="{envtmpdir}" tests

[testenv:skipexamples]
basepython = python2.7
deps =
pytest
commands =
py.test -v --basetemp="{envtmpdir}" tests --ignore tests/test_examples.py

[testenv:coverage]
basepython = python2.7
passenv = *