forked from platformio/platformio-core
Compare commits: cpp17-qtcr...feature/v7 (1449 commits)
.github/ISSUE_TEMPLATE.md (vendored): 5 changed lines

@@ -6,9 +6,8 @@ What kind of issue is this?
use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)

- [ ] **PlatformIO IDE**.
All issues related to PlatformIO IDE should be reported to appropriate repository:
[PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)
All issues related to PlatformIO IDE should be reported to the
[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues) repository

- [ ] **Development Platform or Board**.
All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms
.github/workflows/core.yml (vendored): 24 changed lines

@@ -7,30 +7,37 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        python-version: [2.7, 3.7, 3.8]
        os: [ubuntu-20.04, windows-latest, macos-latest]
        python-version: ["3.6", "3.7", "3.11", "3.12"]

    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: Core System Info
        run: |
          tox -e py

      - name: Python Lint
        if: ${{ matrix.python-version != '3.6' }}
        run: |
          tox -e lint

      - name: Integration Tests
        env:
          TEST_EMAIL_LOGIN: ${{ secrets.TEST_EMAIL_LOGIN }}
          TEST_EMAIL_PASSWORD: ${{ secrets.TEST_EMAIL_PASSWORD }}
          TEST_EMAIL_IMAP_SERVER: ${{ secrets.TEST_EMAIL_IMAP_SERVER }}
        if: ${{ matrix.python-version == '3.11' }}
        run: |
          tox -e testcore

@@ -42,3 +49,4 @@ jobs:
          job_name: '*Core*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          token: ${{ secrets.SLACK_GITHUB_TOKEN }}
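The job above drives all of its checks through tox; a rough local equivalent of its three test steps looks like this (a sketch, assuming tox is installed and, for the integration tests, the TEST_EMAIL_* variables are exported):

    pip install tox
    tox -e py         # "Core System Info" step
    tox -e lint       # "Python Lint" step
    tox -e testcore   # "Integration Tests" step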
.github/workflows/deployment.yml (vendored, new file): 46 lines

@@ -0,0 +1,46 @@
name: Deployment

on:
  push:
    branches:
      - "master"
      - "release/**"

jobs:
  deployment:
    runs-on: ubuntu-latest
    environment: production

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox wheel

      - name: Deployment Tests
        env:
          TEST_EMAIL_LOGIN: ${{ secrets.TEST_EMAIL_LOGIN }}
          TEST_EMAIL_PASSWORD: ${{ secrets.TEST_EMAIL_PASSWORD }}
          TEST_EMAIL_IMAP_SERVER: ${{ secrets.TEST_EMAIL_IMAP_SERVER }}
        run: |
          tox -e testcore

      - name: Build Python source tarball
        # run: python setup.py sdist bdist_wheel
        run: python setup.py sdist

      - name: Publish package to PyPI
        if: ${{ github.ref == 'refs/heads/master' }}
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}
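The packaging step can be reproduced locally with the same command; the upload line below is only an illustration and uses twine, which this workflow does not use (it publishes via pypa/gh-action-pypi-publish instead):

    python setup.py sdist     # same command as the "Build Python source tarball" step
    # twine upload dist/*     # optional manual upload; requires a PyPI API token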
.github/workflows/docs.yml (vendored): 85 changed lines

@@ -4,15 +4,16 @@ on: [push, pull_request]

jobs:
  build:
    name: Build Docs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"
      - name: Set up Python
        uses: actions/setup-python@v1
        uses: actions/setup-python@v4
        with:
          python-version: 3.7
          python-version: "3.11"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip

@@ -29,4 +30,80 @@ jobs:
          type: ${{ job.status }}
          job_name: '*Docs*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          token: ${{ secrets.SLACK_GITHUB_TOKEN }}

      - name: Preserve Docs
        if: ${{ github.event_name == 'push' }}
        run: |
          tar -czvf docs.tar.gz -C docs/_build html rtdpage

      - name: Save artifact
        if: ${{ github.event_name == 'push' }}
        uses: actions/upload-artifact@v3
        with:
          name: docs
          path: ./docs.tar.gz

  deploy:
    name: Deploy Docs
    needs: build
    runs-on: ubuntu-latest
    if: ${{ github.event_name == 'push' && (github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master') }}
    env:
      DOCS_REPO: platformio/platformio-docs
      DOCS_DIR: platformio-docs
      LATEST_DOCS_DIR: latest-docs
      RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
    steps:
      - name: Download artifact
        uses: actions/download-artifact@v3
        with:
          name: docs
      - name: Unpack artifact
        run: |
          mkdir ./${{ env.LATEST_DOCS_DIR }}
          tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
      - name: Delete Artifact
        uses: geekyeggo/delete-artifact@v2
        with:
          name: docs
      - name: Select Docs type
        id: get-destination-dir
        run: |
          if [[ ${{ env.RELEASE_BUILD }} == true ]]; then
            echo "::set-output name=dst_dir::stable"
          else
            echo "::set-output name=dst_dir::latest"
          fi
      - name: Checkout latest Docs
        continue-on-error: true
        uses: actions/checkout@v4
        with:
          repository: ${{ env.DOCS_REPO }}
          path: ${{ env.DOCS_DIR }}
          ref: gh-pages
      - name: Synchronize Docs
        run: |
          rm -rf ${{ env.DOCS_DIR }}/.git
          rm -rf ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          mkdir -p ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          cp -rf ${{ env.LATEST_DOCS_DIR }}/html/* ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          if [[ ${{ env.RELEASE_BUILD }} == false ]]; then
            rm -rf ${{ env.DOCS_DIR }}/page
            mkdir -p ${{ env.DOCS_DIR }}/page
            cp -rf ${{ env.LATEST_DOCS_DIR }}/rtdpage/* ${{ env.DOCS_DIR }}/page
          fi
      - name: Validate Docs
        run: |
          if [ -z "$(ls -A ${{ env.DOCS_DIR }})" ]; then
            echo "Docs folder is empty. Aborting!"
            exit 1
          fi
      - name: Deploy to Github Pages
        uses: peaceiris/actions-gh-pages@v3
        with:
          personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
          external_repository: ${{ env.DOCS_REPO }}
          publish_dir: ./${{ env.DOCS_DIR }}
          commit_message: Sync Docs
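Building the documentation locally goes through the same tox environment that the Contributing steps later in this diff point to (a sketch, assuming tox is installed and the docs submodule is checked out):

    pip install tox
    tox -e docs    # HTML output lands under docs/_build, which the "Preserve Docs" step archives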
.github/workflows/examples.yml (vendored): 27 changed lines

@@ -2,22 +2,28 @@ name: Examples

on: [push, pull_request]


jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-16.04, windows-latest, macos-latest]
        python-version: [2.7, 3.7]
        os: [ubuntu-latest, windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    env:
      PIO_INSTALL_DEVPLATFORM_OWNERNAMES: "platformio"
      PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"

    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          python-version: "3.11"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip

@@ -25,22 +31,15 @@ jobs:

      - name: Run on Linux
        if: startsWith(matrix.os, 'ubuntu')
        env:
          PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,intel_mcs51,aceinna_imu"
        run: |
          # ChipKIT issue: install 32-bit support for GCC PIC32
          sudo apt-get install libc6-i386
          # Free space
          sudo apt clean
          docker rmi $(docker image ls -aq)
          df -h
          # Run
          tox -e testexamples

      - name: Run on macOS
        if: startsWith(matrix.os, 'macos')
        env:
          PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,microchippic32,gd32v,nuclei,lattice_ice40"
        run: |
          df -h
          tox -e testexamples

@@ -50,7 +49,6 @@ jobs:
        env:
          PLATFORMIO_CORE_DIR: C:/pio
          PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
          PIO_INSTALL_DEVPLATFORMS_IGNORE: "ststm8,infineonxmc,siwigsm,riscv_gap"
        run: |
          tox -e testexamples

@@ -62,3 +60,4 @@ jobs:
          job_name: '*Examples*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          token: ${{ secrets.SLACK_GITHUB_TOKEN }}
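A rough local equivalent of the per-OS test steps (a sketch, assuming tox is installed; the ignore list shown here is the Linux one from the workflow above, the macOS and Windows jobs use their own lists):

    export PIO_INSTALL_DEVPLATFORM_OWNERNAMES="platformio"
    export PIO_INSTALL_DEVPLATFORMS_IGNORE="ststm8,infineonxmc,siwigsm,intel_mcs51,aceinna_imu"
    tox -e testexamples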
.github/workflows/projects.yml (vendored, new file): 69 lines

@@ -0,0 +1,69 @@
name: Projects

on: [push, pull_request]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        project:
          - marlin:
            repository: "MarlinFirmware/Marlin"
            folder: "Marlin"
            config_dir: "Marlin"
            env_name: "mega2560"
          - esphome:
            repository: "esphome/esphome"
            folder: "esphome"
            config_dir: "esphome"
            env_name: "esp32-arduino"
          - smartknob:
            repository: "scottbez1/smartknob"
            folder: "smartknob"
            config_dir: "smartknob"
            env_name: "view"
          - espurna:
            repository: "xoseperez/espurna"
            folder: "espurna"
            config_dir: "espurna/code"
            env_name: "nodemcu-lolin"
          - OpenMQTTGateway:
            repository: "1technophile/OpenMQTTGateway"
            folder: "OpenMQTTGateway"
            config_dir: "OpenMQTTGateway"
            env_name: "esp32-m5atom-lite"
        os: [ubuntu-latest, windows-latest, macos-latest]
        exclude:
          - os: windows-latest
            project: {"esphome": "", "repository": "esphome/esphome", "folder": "esphome", "config_dir": "esphome", "env_name": "esp32-arduino"}

    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: 3.11

      - name: Install PlatformIO
        run: pip install -U .

      - name: Check out ${{ matrix.project.repository }}
        uses: actions/checkout@v4
        with:
          submodules: "recursive"
          repository: ${{ matrix.project.repository }}
          path: ${{ matrix.project.folder }}

      - name: Install ESPHome dependencies
        # Requires esptool package as it's used in a custom prescript
        if: ${{ contains(matrix.project.repository, 'esphome') }}
        run: pip install esptool==3.*

      - name: Compile ${{ matrix.project.repository }}
        run: pio run -d ${{ matrix.project.config_dir }} -e ${{ matrix.project.env_name }}
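Each matrix entry boils down to a plain PlatformIO invocation; for the Marlin entry, a local equivalent would look roughly like this (a sketch, assuming git and PlatformIO Core are installed):

    pip install -U platformio
    git clone --recursive https://github.com/MarlinFirmware/Marlin Marlin
    pio run -d Marlin -e mega2560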
.gitignore (vendored): 2 changed lines

@@ -1,6 +1,6 @@
*.egg-info
*.pyc
.pioenvs
__pycache__
.tox
docs/_build
dist

Deleted file (an isort configuration):

@@ -1,3 +0,0 @@
[settings]
line_length=88
known_third_party=OpenSSL, SCons, autobahn, jsonrpc, twisted, zope
.pylintrc: 18 changed lines

@@ -3,20 +3,10 @@ output-format=colorized

[MESSAGES CONTROL]
disable=
    bad-continuation,
    bad-whitespace,
    missing-docstring,
    ungrouped-imports,
    invalid-name,
    cyclic-import,
    duplicate-code,
    superfluous-parens,
    invalid-name,
    too-few-public-methods,
    useless-object-inheritance,
    useless-import-alias,
    fixme,
    bad-option-value,

    ; PY2 Compat
    super-with-arguments,
    raise-missing-from
    consider-using-f-string,
    cyclic-import,
    use-dict-literal
Deleted file (a Read the Docs configuration):

@@ -1,12 +0,0 @@
# See https://docs.readthedocs.io/en/stable/config-file/index.html

version: 2

sphinx:
  configuration: docs/conf.py

formats:
  - pdf

submodules:
  include: all
CODE_OF_CONDUCT.md (new file): 3 lines

@@ -0,0 +1,3 @@
# Code of Conduct

See https://piolabs.com/legal/code-of-conduct.html
@@ -3,19 +3,20 @@ Contributing

To get started, <a href="https://cla-assistant.io/platformio/platformio-core">sign the Contributor License Agreement</a>.

1. Fork the repository on GitHub.
1. Fork the repository on GitHub
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of project where is located `tox.ini` and run `tox -e py37`
4. Go to the root of the PlatformIO Core project where `tox.ini` is located (``cd platformio-core``) and run `tox -e py39`.
   You can replace `py39` with your own Python version. For example, `py311` means Python 3.11.
5. Activate current development environment:

   * Windows: `.tox\py37\Scripts\activate`
   * Bash/ZSH: `source .tox/py37/bin/activate`
   * Fish: `source .tox/py37/bin/activate.fish`
   * Windows: `.tox\py39\Scripts\activate`
   * Bash/ZSH: `source .tox/py39/bin/activate`
   * Fish: `source .tox/py39/bin/activate.fish`

6. Make changes to code, documentation, etc.
7. Lint source code `make before-commit`
8. Run the tests `make test`
9. Build documentation `tox -e docs` (creates a directory _build under docs where you can find the html)
10. Commit changes to your forked repository
11. Submit a Pull Request on GitHub.
11. Submit a Pull Request on GitHub
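Taken together, the updated steps amount to something like this (a sketch; `py311` assumes Python 3.11 is installed, as noted in step 4, and the activate line is the Bash/ZSH variant):

    git clone --recursive https://github.com/YourGithubUsername/platformio-core.git
    cd platformio-core
    pip install tox
    tox -e py311
    source .tox/py311/bin/activate
    make before-commit   # lint
    make test            # run the tests
    tox -e docs          # build the documentation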
HISTORY.rst: 456 changed lines

@@ -1,148 +1,368 @@
Release Notes
=============
.. |PIOCONF| replace:: `"platformio.ini" <https://docs.platformio.org/en/latest/projectconf.html>`__ configuration file
.. |LIBRARYJSON| replace:: `library.json <https://docs.platformio.org/en/latest/manifests/library-json/index.html>`__
.. |LDF| replace:: `LDF <https://docs.platformio.org/en/latest/librarymanager/ldf.html>`__
.. |INTERPOLATION| replace:: `Interpolation of Values <https://docs.platformio.org/en/latest/projectconf/interpolation.html>`__
.. |UNITTESTING| replace:: `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html>`__
.. |DEBUGGING| replace:: `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html>`__
.. |STATICCODEANALYSIS| replace:: `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__
.. _release_notes_6:
PlatformIO Core 6
-----------------
Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success.
6.1.15 (2024-??-??)
~~~~~~~~~~~~~~~~~~~
* Resolved an issue where the |LDF| couldn't locate a library dependency declared via version control system repository (`issue #4885 <https://github.com/platformio/platformio-core/issues/4885>`_)
6.1.14 (2024-03-21)
~~~~~~~~~~~~~~~~~~~
* Introduced the ``--json-output`` option to the `pio test <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html>`__ command, enabling users to generate test results in the JSON format
* Upgraded the build engine to the latest version of SCons (4.7.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.7.0>`__)
* Broadened version support for the ``pyelftools`` dependency, enabling compatibility with lower versions and facilitating integration with a wider range of third-party tools (`issue #4834 <https://github.com/platformio/platformio-core/issues/4834>`_)
* Addressed an issue where passing a relative path (``--project-dir``) to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command resulted in an error (`issue #4847 <https://github.com/platformio/platformio-core/issues/4847>`_)
* Enhanced |STATICCODEANALYSIS| to accommodate scenarios where custom ``src_dir`` or ``include_dir`` are located outside the project folder (`pull #4874 <https://github.com/platformio/platformio-core/pull/4874>`_)
* Corrected the validation of ``symlink://`` `package specifications <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html#local-folder>`__ , resolving an issue that caused the package manager to repeatedly reinstall dependencies (`pull #4870 <https://github.com/platformio/platformio-core/pull/4870>`_)
* Resolved an issue related to the relative package path in the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command
* Resolved an issue where the |LDF| selected an incorrect library version (`issue #4860 <https://github.com/platformio/platformio-core/issues/4860>`_)
* Resolved an issue with the ``hexlify`` filter in the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ command, ensuring proper representation of characters with Unicode code points higher than 127 (`issue #4732 <https://github.com/platformio/platformio-core/issues/4732>`_)
6.1.13 (2024-01-12)
~~~~~~~~~~~~~~~~~~~
* Expanded support for SCons variables declared in the legacy format ``${SCONS_VARNAME}`` (`issue #4828 <https://github.com/platformio/platformio-core/issues/4828>`_)
6.1.12 (2024-01-10)
~~~~~~~~~~~~~~~~~~~
* Added support for Python 3.12
* Introduced the capability to launch the debug server in a separate process (`issue #4722 <https://github.com/platformio/platformio-core/issues/4722>`_)
* Introduced a warning during the verification of MCU maximum RAM usage, signaling when the allocated RAM surpasses 100% (`issue #4791 <https://github.com/platformio/platformio-core/issues/4791>`_)
* Drastically enhanced the speed of project building when operating in verbose mode (`issue #4783 <https://github.com/platformio/platformio-core/issues/4783>`_)
* Upgraded the build engine to the latest version of SCons (4.6.0) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.6.0>`__)
* Enhanced the handling of built-in variables in |PIOCONF| during |INTERPOLATION| (`issue #4695 <https://github.com/platformio/platformio-core/issues/4695>`_)
* Enhanced PIP dependency declarations for improved reliability and extended support to include Python 3.6 (`issue #4819 <https://github.com/platformio/platformio-core/issues/4819>`_)
* Implemented automatic installation of missing dependencies when utilizing a SOCKS proxy (`issue #4822 <https://github.com/platformio/platformio-core/issues/4822>`_)
* Implemented a fail-safe mechanism to terminate a debugging session if an unknown CLI option is passed (`issue #4699 <https://github.com/platformio/platformio-core/issues/4699>`_)
* Rectified an issue where ``${platformio.name}`` erroneously represented ``None`` as the default `project name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/name.html>`__ (`issue #4717 <https://github.com/platformio/platformio-core/issues/4717>`_)
* Resolved an issue where the ``COMPILATIONDB_INCLUDE_TOOLCHAIN`` setting was not correctly applying to private libraries (`issue #4762 <https://github.com/platformio/platformio-core/issues/4762>`_)
* Resolved an issue where ``get_systype()`` inaccurately returned the architecture when executed within a Docker container on a 64-bit kernel with a 32-bit userspace (`issue #4777 <https://github.com/platformio/platformio-core/issues/4777>`_)
* Resolved an issue with incorrect handling of the ``check_src_filters`` option when used in multiple environments (`issue #4788 <https://github.com/platformio/platformio-core/issues/4788>`_)
* Resolved an issue where running `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ resulted in duplicated "include" entries (`issue #4723 <https://github.com/platformio/platformio-core/issues/4723>`_)
* Resolved an issue where native debugging failed on the host machine (`issue #4745 <https://github.com/platformio/platformio-core/issues/4745>`_)
* Resolved an issue where custom debug configurations were being inadvertently overwritten in VSCode's ``launch.json`` (`issue #4810 <https://github.com/platformio/platformio-core/issues/4810>`_)
6.1.11 (2023-08-31)
~~~~~~~~~~~~~~~~~~~
* Resolved a possible issue that may cause generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to fail to launch a debug session because of a missing "objdump" binary when GDB is not part of the toolchain package
* Resolved a regression issue that resulted in the malfunction of the Memory Inspection feature within `PIO Home <https://docs.platformio.org/en/latest/home/index.html>`__
6.1.10 (2023-08-11)
~~~~~~~~~~~~~~~~~~~
* Resolved an issue that caused generated projects for `PlatformIO IDE for VSCode <https://docs.platformio.org/en/latest/integration/ide/vscode.html>`__ to break when the ``-iprefix`` compiler flag was used
* Resolved an issue encountered while utilizing the `pio pkg exec <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_exec.html>`__ command on the Windows platform to execute Python scripts from a package
* Implemented a crucial improvement to the `pio run <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html>`__ command, guaranteeing that the ``monitor`` target is not executed if any of the preceding targets, such as ``upload``, encounter failures
* `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.11 with new checks, CLI commands and various analysis improvements
* Resolved a critical issue that arose on macOS ARM platforms due to the Python "requests" module, leading to a "ModuleNotFoundError: No module named 'chardet'" (`issue #4702 <https://github.com/platformio/platformio-core/issues/4702>`_)
6.1.9 (2023-07-06)
~~~~~~~~~~~~~~~~~~
* Rectified a regression bug that occurred when the ``-include`` flag was passed via the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option as a relative path and subsequently expanded (`issue #4683 <https://github.com/platformio/platformio-core/issues/4683>`_)
* Resolved an issue that resulted in unresolved absolute toolchain paths when generating the `Compilation database "compile_commands.json" <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ (`issue #4684 <https://github.com/platformio/platformio-core/issues/4684>`_)
6.1.8 (2023-07-05)
~~~~~~~~~~~~~~~~~~
* Added a new ``--lint`` option to the `pio project config <https://docs.platformio.org/en/latest/core/userguide/project/cmd_config.html>`__ command, enabling users to efficiently perform linting on the |PIOCONF|
* Enhanced the parsing of the |PIOCONF| to provide comprehensive diagnostic information
* Expanded the functionality of the |LIBRARYJSON| manifest by allowing the use of the underscore symbol in the `keywords <https://docs.platformio.org/en/latest/manifests/library-json/fields/keywords.html>`__ field
* Optimized project integration templates to address the issue of long paths on Windows (`issue #4652 <https://github.com/platformio/platformio-core/issues/4652>`_)
* Refactored |UNITTESTING| engine to resolve compiler warnings with "-Wpedantic" option (`pull #4671 <https://github.com/platformio/platformio-core/pull/4671>`_)
* Eliminated erroneous warning regarding the use of obsolete PlatformIO Core when downgrading to the stable version (`issue #4664 <https://github.com/platformio/platformio-core/issues/4664>`_)
* Updated the `pio project metadata <https://docs.platformio.org/en/latest/core/userguide/project/cmd_metadata.html>`__ command to return C/C++ flags as parsed Unix shell arguments when dumping project build metadata
* Resolved a critical issue related to the usage of the ``-include`` flag within the `build_flags <https://docs.platformio.org/en/latest/projectconf/sections/env/options/build/build_flags.html>`__ option, specifically when employing dynamic variables (`issue #4682 <https://github.com/platformio/platformio-core/issues/4682>`_)
* Removed PlatformIO IDE for Atom from the documentation as `Atom has been deprecated <https://github.blog/2022-06-08-sunsetting-atom/>`__
6.1.7 (2023-05-08)
~~~~~~~~~~~~~~~~~~
* Introduced a new ``--sample-code`` option to the `pio project init <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command, which allows users to include sample code in the newly created project
* Added validation for `project working environment names <https://docs.platformio.org/en/latest/projectconf/sections/env/index.html#working-env-name>`__ to ensure that they only contain lowercase letters ``a-z``, numbers ``0-9``, and special characters ``_`` (underscore) and ``-`` (hyphen)
* Added the ability to show a detailed library dependency tree only in `verbose mode <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html#cmdoption-pio-run-v>`__, which can help you understand the relationship between libraries and troubleshoot issues more effectively (`issue #4517 <https://github.com/platformio/platformio-core/issues/4517>`_)
* Added the ability to run only the `device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ when using the `pio run -t monitor <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html>`__ command, saving you time and resources by skipping the build process
* Implemented a new feature to store device monitor logs in the project's ``logs`` folder, making it easier to access and review device monitor logs for your projects (`issue #4596 <https://github.com/platformio/platformio-core/issues/4596>`_)
* Improved support for projects located on Windows network drives, including Network Shared Folder, Dropbox, OneDrive, Google Drive, and other similar services (`issue #3417 <https://github.com/platformio/platformio-core/issues/3417>`_)
* Improved source file filtering functionality for the `Static Code Analysis <https://docs.platformio.org/en/latest/advanced/static-code-analysis/index.html>`__ feature, making it easier to analyze only the code you need to
* Upgraded the build engine to the latest version of SCons (4.5.2) to improve build performance, reliability, and compatibility with other tools and systems (`release notes <https://github.com/SCons/scons/releases/tag/4.5.2>`__)
* Implemented a fix for shell injection vulnerabilities when converting INO files to CPP, ensuring your code is safe and secure (`issue #4532 <https://github.com/platformio/platformio-core/issues/4532>`_)
* Restored the project generator for the `NetBeans IDE <https://docs.platformio.org/en/latest/integration/ide/netbeans.html>`__, providing you with more flexibility and options for your development workflow
* Resolved installation issues with PIO Remote on Raspberry Pi and other small form-factor PCs (`issue #4425 <https://github.com/platformio/platformio-core/issues/4425>`_, `issue #4493 <https://github.com/platformio/platformio-core/issues/4493>`_, `issue #4607 <https://github.com/platformio/platformio-core/issues/4607>`_)
* Resolved an issue where the `build_cache_dir <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/directory/build_cache_dir.html>`__ setting was not being recognized consistently across multiple environments (`issue #4574 <https://github.com/platformio/platformio-core/issues/4574>`_)
* Resolved an issue where organization details could not be updated using the `pio org update <https://docs.platformio.org/en/latest/core/userguide/org/cmd_update.html>`__ command
* Resolved an issue where the incorrect debugging environment was generated for VSCode in "Auto" mode (`issue #4597 <https://github.com/platformio/platformio-core/issues/4597>`_)
* Resolved an issue where native tests would fail if a custom program name was specified (`issue #4546 <https://github.com/platformio/platformio-core/issues/4546>`_)
* Resolved an issue where the PlatformIO |DEBUGGING| solution was not escaping the tool installation process into MI2 correctly (`issue #4565 <https://github.com/platformio/platformio-core/issues/4565>`_)
* Resolved an issue where multiple targets were not executed sequentially (`issue #4604 <https://github.com/platformio/platformio-core/issues/4604>`_)
* Resolved an issue where upgrading PlatformIO Core fails on Windows with Python 3.11 (`issue #4540 <https://github.com/platformio/platformio-core/issues/4540>`_)
6.1.6 (2023-01-23)
~~~~~~~~~~~~~~~~~~
* Added support for Python 3.11
* Added a new `name <https://docs.platformio.org/en/latest/projectconf/sections/platformio/options/generic/description.html>`__ configuration option to customize a project name (`pull #4498 <https://github.com/platformio/platformio-core/pull/4498>`_)
* Made assets (templates, ``99-platformio-udev.rules``) part of Python's module (`issue #4458 <https://github.com/platformio/platformio-core/issues/4458>`_)
* Updated `Clang-Tidy <https://docs.platformio.org/en/latest/plus/check-tools/clang-tidy.html>`__ check tool to v15.0.5 with new diagnostics and bugfixes
* Removed dependency on the "zeroconf" package and install it only when a user lists mDNS devices (issue with zeroconf's LGPL license)
* Show the real error message instead of "Can not remove temporary directory" when |PIOCONF| is broken (`issue #4480 <https://github.com/platformio/platformio-core/issues/4480>`_)
* Fixed an issue with an incorrect test summary when a testcase name includes a colon (`issue #4508 <https://github.com/platformio/platformio-core/issues/4508>`_)
* Fixed an issue when `extends <https://docs.platformio.org/en/latest/projectconf/sections/env/options/advanced/extends.html>`__ did not override options in the right order (`issue #4462 <https://github.com/platformio/platformio-core/issues/4462>`_)
* Fixed an issue when `pio pkg list <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_list.html>`__ and `pio pkg uninstall <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_uninstall.html>`__ commands fail if there are circular dependencies in the |LIBRARYJSON| manifests (`issue #4475 <https://github.com/platformio/platformio-core/issues/4475>`_)
|
||||
|
||||
6.1.5 (2022-11-01)
~~~~~~~~~~~~~~~~~~

* Added a new `enable_proxy_strict_ssl <https://docs.platformio.org/en/latest/core/userguide/cmd_settings.html>`__ setting to disable proxy server certificate verification (`issue #4432 <https://github.com/platformio/platformio-core/issues/4432>`_)
* Documented `PlatformIO Core Proxy Configuration <https://docs.platformio.org/en/latest/core/installation/proxy-configuration.html>`__
* Sped up the device port finder by avoiding loading board HWIDs from development platforms
* Improved caching of build metadata in debug mode
* Fixed an issue when the `pio pkg install --storage-dir <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html>`__ command required a PlatformIO project (`issue #4410 <https://github.com/platformio/platformio-core/issues/4410>`_)

6.1.4 (2022-08-12)
~~~~~~~~~~~~~~~~~~

* Added support for accepting the original FileNode environment in a "callback" function when using `Build Middlewares <https://docs.platformio.org/en/latest/scripting/middlewares.html>`__ (`pull #4380 <https://github.com/platformio/platformio-core/pull/4380>`_); a usage sketch follows these notes
* Improved the device port finder when using a dual-channel UART converter (`issue #4367 <https://github.com/platformio/platformio-core/issues/4367>`_)
* Improved project dependency resolving when using the `pio project init --ide <https://docs.platformio.org/en/latest/core/userguide/project/cmd_init.html>`__ command
* Upgraded the build engine to SCons 4.4.0 (`release notes <https://github.com/SCons/scons/releases/tag/4.4.0>`__)
* Keep custom "unwantedRecommendations" when generating projects for VSCode (`issue #4383 <https://github.com/platformio/platformio-core/issues/4383>`_)
* Do not resolve project dependencies for the ``cleanall`` target (`issue #4344 <https://github.com/platformio/platformio-core/issues/4344>`_)
* Warn about calling "env.BuildSources" in a POST-type script (`issue #4385 <https://github.com/platformio/platformio-core/issues/4385>`_)
* Fixed an issue when escaping macros/defines for IDE integration (`issue #4360 <https://github.com/platformio/platformio-core/issues/4360>`_)
* Fixed an issue when the "cleanall" target removed dependencies from all working environments (`issue #4386 <https://github.com/platformio/platformio-core/issues/4386>`_)
6.1.3 (2022-07-18)
~~~~~~~~~~~~~~~~~~

* Fixed a regression bug when opening device monitor without any filters (`issue #4363 <https://github.com/platformio/platformio-core/issues/4363>`_)

6.1.2 (2022-07-18)
~~~~~~~~~~~~~~~~~~

* Export a ``PIO_UNIT_TESTING`` macro to the project source files and dependent libraries in the |UNITTESTING| mode
* Improved detection of Windows architecture (`issue #4353 <https://github.com/platformio/platformio-core/issues/4353>`_)
* Warn about unknown `device monitor filters <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #4362 <https://github.com/platformio/platformio-core/issues/4362>`_)
* Fixed a regression bug when `libArchive <https://docs.platformio.org/en/latest/manifests/library-json/fields/build/libarchive.html>`__ option declared in the |LIBRARYJSON| manifest was ignored (`issue #4351 <https://github.com/platformio/platformio-core/issues/4351>`_)
* Fixed an issue when the `pio pkg publish <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_publish.html>`__ command didn't work with Python 3.6 (`issue #4352 <https://github.com/platformio/platformio-core/issues/4352>`_)

6.1.1 (2022-07-11)
~~~~~~~~~~~~~~~~~~

* Added a new ``monitor_encoding`` project configuration option to configure the `Device Monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html>`__ (`issue #4350 <https://github.com/platformio/platformio-core/issues/4350>`_)
* Allowed specifying project environments for the `pio ci <https://docs.platformio.org/en/latest/core/userguide/cmd_ci.html>`__ command (`issue #4347 <https://github.com/platformio/platformio-core/issues/4347>`_)
* Show "TimeoutError" only in verbose mode when a serial port cannot be found
* Fixed an issue when a serial port was not automatically detected if the board has predefined HWIDs
* Fixed an issue with endless scanning of project dependencies (`issue #4349 <https://github.com/platformio/platformio-core/issues/4349>`_)
* Fixed an issue with the |LDF| when incompatible libraries were used for the working project environment with a missing framework (`pull #4346 <https://github.com/platformio/platformio-core/pull/4346>`_)

6.1.0 (2022-07-06)
~~~~~~~~~~~~~~~~~~

* **Device Manager**

  - Automatically reconnect the device monitor if a connection fails
  - Added a new `pio device monitor --no-reconnect <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#cmdoption-pio-device-monitor-no-reconnect>`__ option to disable automatic reconnection
  - Handle device monitor disconnects more gracefully (`issue #3939 <https://github.com/platformio/platformio-core/issues/3939>`_)
  - Improved the serial port finder for `Black Magic Probe <https://docs.platformio.org/en/latest/plus/debug-tools/blackmagic.html>`__ (`issue #4023 <https://github.com/platformio/platformio-core/issues/4023>`_)
  - Improved the serial port finder for boards with predefined HWIDs
  - Replaced ``monitor_flags`` with independent project configuration options: `monitor_parity <https://docs.platformio.org/en/latest/projectconf/section_env_monitor.html#monitor-parity>`__, `monitor_eol <https://docs.platformio.org/en/latest/projectconf/section_env_monitor.html#monitor-eol>`__, `monitor_raw <https://docs.platformio.org/en/latest/projectconf/section_env_monitor.html#monitor-raw>`__, `monitor_echo <https://docs.platformio.org/en/latest/projectconf/section_env_monitor.html#monitor-echo>`__
  - Fixed an issue when the monitor filters were not applied in their specified order (`issue #4320 <https://github.com/platformio/platformio-core/issues/4320>`_)

* **Unit Testing**

  - Updated the "Getting Started" documentation for the `GoogleTest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/googletest.html>`__ testing and mocking framework
  - Export |UNITTESTING| flags only to the project build environment (``projenv``, files in the "src" folder)
  - Merged the "building" stage with "uploading" for the embedded target (`issue #4307 <https://github.com/platformio/platformio-core/issues/4307>`_)
  - Do not resolve dependencies from the project "src" folder when the `test_build_src <https://docs.platformio.org/en/latest/projectconf/section_env_test.html#test-build-src>`__ option is not enabled
  - Do not immediately terminate a testing program when results are received
  - Fixed an issue when a custom `pio test --project-config <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html#cmdoption-pio-test-c>`__ was not handled properly (`issue #4299 <https://github.com/platformio/platformio-core/issues/4299>`_)
  - Fixed an issue when testing results were wrong in verbose mode (`issue #4336 <https://github.com/platformio/platformio-core/issues/4336>`_)

* **Build System**

  - Significantly improved support for `Pre & Post Actions <https://docs.platformio.org/en/latest/scripting/actions.html>`__ (a usage sketch follows these release notes)

    * Allowed declaring actions in `PRE-type scripts <https://docs.platformio.org/en/latest/scripting/launch_types.html>`__ even if the target is not ready yet
    * Allowed library maintainers to use Pre & Post Actions in the library `extraScript <https://docs.platformio.org/en/latest/manifests/library-json/fields/build/extrascript.html>`__

  - Documented `Stringification <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#stringification>`__ – converting a macro argument into a string constant (`issue #4310 <https://github.com/platformio/platformio-core/issues/4310>`_)
  - Added a new `pio run --monitor-port <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html#cmdoption-pio-run-monitor-port>`__ option to specify a custom device monitor port for the ``monitor`` target (`issue #4337 <https://github.com/platformio/platformio-core/issues/4337>`_)
  - Added the ``env.StringifyMacro(value)`` helper function for `Advanced Scripting <https://docs.platformio.org/en/latest/scripting/index.html>`__ (also shown in the sketch below)
  - Allowed ``Import("projenv")`` in a library extra script (`issue #4305 <https://github.com/platformio/platformio-core/issues/4305>`_)
  - Fixed an issue when the `build_unflags <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#build-unflags>`__ operation ignored a flag value (`issue #4309 <https://github.com/platformio/platformio-core/issues/4309>`_)
  - Fixed an issue when the `build_unflags <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#build-unflags>`__ option was not applied to the ``ASPPFLAGS`` scope
  - Fixed an issue on Windows when flags were wrapped into a temporary file while generating the `Compilation database "compile_commands.json" <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__
  - Fixed an issue with the |LDF| when recursively scanning dependencies in the ``chain`` mode
  - Fixed a "PermissionError" on Windows when running the "clean" or "cleanall" targets (`issue #4331 <https://github.com/platformio/platformio-core/issues/4331>`_)

* **Package Management**

  - Fixed an issue when library dependencies were installed for an incompatible project environment (`issue #4338 <https://github.com/platformio/platformio-core/issues/4338>`_)

* **Miscellaneous**

  - Warn about an incompatible Bash version for the `Shell Completion <https://docs.platformio.org/en/latest/core/userguide/system/completion/index.html>`__ (`issue #4326 <https://github.com/platformio/platformio-core/issues/4326>`_)
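
For reference, the extra-script sketch below ties together two of the scripting additions above (Pre & Post Actions and ``env.StringifyMacro``); the ``APP_VERSION`` macro and the message printed after the build are illustrative assumptions, not part of the release itself.

.. code-block:: python

    # extra_script.py -- a minimal sketch combining a post action and StringifyMacro
    Import("env")

    # pass a string constant to the program without escaping it by hand
    env.Append(CPPDEFINES=[("APP_VERSION", env.StringifyMacro("1.2.3"))])

    def after_build(source, target, env):
        # runs once the program target has been produced
        print("Built:", target[0].get_abspath())

    # "buildprog" is the program target commonly used in the scripting docs;
    # treat it as an assumption if your platform names the target differently
    env.AddPostAction("buildprog", after_build)
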
6.0.2 (2022-06-01)
~~~~~~~~~~~~~~~~~~

* Control |UNITTESTING| verbosity with a new multilevel `pio test -v <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html#cmdoption-pio-test-v>`__ command option (`issue #4276 <https://github.com/platformio/platformio-core/issues/4276>`_)
* Follow symbolic links when searching for unit test suites (`issue #4288 <https://github.com/platformio/platformio-core/issues/4288>`_)
* Show a warning when testing an empty project without a test suite (`issue #4278 <https://github.com/platformio/platformio-core/issues/4278>`_)
* Improved support for `Asking for input (prompts) <https://docs.platformio.org/en/latest/scripting/examples/asking_for_input.html>`_ (see the sketch below)
* Fixed an issue when the `build_src_flags <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#build-src-flags>`__ option was applied outside the project scope (`issue #4277 <https://github.com/platformio/platformio-core/issues/4277>`_)
* Fixed an issue with debugging assembly files without a preprocessor (".s")
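
In the spirit of the "Asking for input (prompts)" example mentioned above, a minimal prompt sketch is shown below; prompting for the upload port is an illustrative assumption.

.. code-block:: python

    # extra_script.py -- a minimal sketch of asking the user for input before upload
    Import("env")

    def ask_for_port(source, target, env):
        # block and ask which serial port to use (illustrative)
        port = input("Upload port: ")
        env.Replace(UPLOAD_PORT=port)

    env.AddPreAction("upload", ask_for_port)
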
6.0.1 (2022-05-17)
~~~~~~~~~~~~~~~~~~

* Improved support for the renamed configuration options (`issue #4270 <https://github.com/platformio/platformio-core/issues/4270>`_)
* Fixed an issue when calling the built-in `pio device monitor <https://docs.platformio.org/en/latest/core/userguide/device/cmd_monitor.html#filters>`__ filters
* Fixed an issue when using |INTERPOLATION| and merging str+int options (`issue #4271 <https://github.com/platformio/platformio-core/issues/4271>`_)

6.0.0 (2022-05-16)
~~~~~~~~~~~~~~~~~~

Please check the `Migration guide from 5.x to 6.0 <https://docs.platformio.org/en/latest/core/migration.html>`__.

* **Package Management**

  - New unified Package Management CLI (``pio pkg``):

    * `pio pkg exec <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_exec.html>`_ - run a command from a package tool (`issue #4163 <https://github.com/platformio/platformio-core/issues/4163>`_)
    * `pio pkg install <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html>`_ - install the project dependencies or custom packages
    * `pio pkg list <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_list.html>`__ - list installed packages
    * `pio pkg outdated <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_outdated.html>`__ - check for outdated project packages
    * `pio pkg search <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_search.html>`__ - search for packages
    * `pio pkg show <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_show.html>`__ - show package information
    * `pio pkg uninstall <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_uninstall.html>`_ - uninstall the project dependencies or custom packages
    * `pio pkg update <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_update.html>`__ - update the project dependencies or custom packages

  - Package Manifest

    * Added support for `"scripts" <https://docs.platformio.org/en/latest/librarymanager/config.html#scripts>`__ (`issue #485 <https://github.com/platformio/platformio-core/issues/485>`_)
    * Added support for `multi-licensed <https://docs.platformio.org/en/latest/librarymanager/config.html#license>`__ packages using SPDX expressions (`issue #4037 <https://github.com/platformio/platformio-core/issues/4037>`_)
    * Added support for `"dependencies" <https://docs.platformio.org/en/latest/librarymanager/config.html#dependencies>`__ declared in a "tool" package manifest

  - Added support for `symbolic links <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_install.html#local-folder>`__, allowing the Package Manager to point at a local source folder (`issue #3348 <https://github.com/platformio/platformio-core/issues/3348>`_)
  - Automatically install dependencies of local (private) project libraries (`issue #2910 <https://github.com/platformio/platformio-core/issues/2910>`_)
  - Improved detection of a package type from a tarball archive (`issue #3828 <https://github.com/platformio/platformio-core/issues/3828>`_)
  - Ignore files according to the patterns declared in ".gitignore" when using the `pio package pack <https://docs.platformio.org/en/latest/core/userguide/pkg/cmd_pack.html>`__ command (`issue #4188 <https://github.com/platformio/platformio-core/issues/4188>`_)
  - Dropped automatic updates of global libraries and development platforms (`issue #4179 <https://github.com/platformio/platformio-core/issues/4179>`_)
  - Dropped support for the "pythonPackages" field in the "platform.json" manifest in favor of `Extra Python Dependencies <https://docs.platformio.org/en/latest/scripting/examples/extra_python_packages.html>`__
  - Fixed an issue when dependencies manually removed from the |PIOCONF| were not uninstalled from the storage (`issue #3076 <https://github.com/platformio/platformio-core/issues/3076>`_)

* **Unit Testing**

  - Refactored the |UNITTESTING| solution and its documentation from scratch
  - New: `Test Hierarchy <https://docs.platformio.org/en/latest/advanced/unit-testing/structure.html>`_ (`issue #4135 <https://github.com/platformio/platformio-core/issues/4135>`_)
  - New: `Doctest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/doctest.html>`__ testing framework (`issue #4240 <https://github.com/platformio/platformio-core/issues/4240>`_)
  - New: `GoogleTest <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/googletest.html>`__ testing and mocking framework (`issue #3572 <https://github.com/platformio/platformio-core/issues/3572>`_)
  - New: `Semihosting <https://docs.platformio.org/en/latest/advanced/unit-testing/semihosting.html>`__ (`issue #3516 <https://github.com/platformio/platformio-core/issues/3516>`_)
  - New: Hardware `Simulators <https://docs.platformio.org/en/latest/advanced/unit-testing/simulators/index.html>`__ for Unit Testing (QEMU, Renode, SimAVR, and custom solutions)
  - New: ``test`` `build configuration <https://docs.platformio.org/en/latest/projectconf/build_configurations.html>`__
  - Added support for a `custom testing framework <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/custom/index.html>`_
  - Added support for a custom `testing command <https://docs.platformio.org/en/latest/projectconf/section_env_test.html#test-testing-command>`__
  - Added support for a `custom Unity library <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/custom/examples/custom_unity_library.html>`__ (`issue #3980 <https://github.com/platformio/platformio-core/issues/3980>`_)
  - Added support for the ``socket://`` and ``rfc2217://`` protocols using the `test_port <https://docs.platformio.org/en/latest/projectconf/section_env_test.html#test-port>`__ option (`issue #4229 <https://github.com/platformio/platformio-core/issues/4229>`_)
  - List available project tests with a new `pio test --list-tests <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html#cmdoption-pio-test-list-tests>`__ option
  - Pass extra arguments to the testing program with a new `pio test --program-arg <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html#cmdoption-pio-test-a>`__ option (`issue #3132 <https://github.com/platformio/platformio-core/issues/3132>`_)
  - Generate reports in JUnit and JSON formats using the `pio test <https://docs.platformio.org/en/latest/core/userguide/cmd_test.html>`__ command (`issue #2891 <https://github.com/platformio/platformio-core/issues/2891>`_)
  - Provide more information when a native program crashes on the host (exits with a non-zero return code) (`issue #3429 <https://github.com/platformio/platformio-core/issues/3429>`_)
  - Improved automatic detection of a testing serial port (`issue #4076 <https://github.com/platformio/platformio-core/issues/4076>`_)
  - Fixed an issue when command line parameters (``--ignore``, ``--filter``) did not override values defined in the |PIOCONF| (`issue #3845 <https://github.com/platformio/platformio-core/issues/3845>`_)
  - Renamed the "test_build_project_src" project configuration option to `test_build_src <https://docs.platformio.org/en/latest/projectconf/section_env_test.html#test-build-src>`__
  - Removed the "test_transport" option in favor of the `Custom "unity_config.h" <https://docs.platformio.org/en/latest/advanced/unit-testing/frameworks/unity.html>`_

* **Static Code Analysis**

  - Updated analysis tools:

    * `Cppcheck <https://docs.platformio.org/en/latest/plus/check-tools/cppcheck.html>`__ v2.7 with various checker improvements and fixed false positives
    * `PVS-Studio <https://docs.platformio.org/en/latest/plus/check-tools/pvs-studio.html>`__ v7.18 with an improved and updated semantic analysis system

  - Added support for a custom `Clang-Tidy <https://docs.platformio.org/en/latest/plus/check-tools/clang-tidy.html>`__ configuration file (`issue #4186 <https://github.com/platformio/platformio-core/issues/4186>`_)
  - Added the ability to override a tool version using the `platform_packages <https://docs.platformio.org/en/latest/projectconf/section_env_platform.html#platform-packages>`__ option (`issue #3798 <https://github.com/platformio/platformio-core/issues/3798>`_)
  - Fixed an issue with improper handling of defects that do not specify a source file (`issue #4237 <https://github.com/platformio/platformio-core/issues/4237>`_)

* **Build System**

  - Show project dependency licenses when building in verbose mode
  - Fixed an issue when the |LDF| ignored the project `lib_deps <https://docs.platformio.org/en/latest/projectconf/section_env_library.html#lib-deps>`__ while resolving library dependencies (`issue #3598 <https://github.com/platformio/platformio-core/issues/3598>`_)
  - Fixed an issue with calling an extra script located outside a project (`issue #4220 <https://github.com/platformio/platformio-core/issues/4220>`_)
  - Fixed an issue when the GCC preprocessor was applied to ".s" assembly files on case-insensitive OSes such as Windows (`issue #3917 <https://github.com/platformio/platformio-core/issues/3917>`_)
  - Fixed an issue when the |LDF| ignored `build_src_flags <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#build-src-flags>`__ in the "deep+" mode (`issue #4253 <https://github.com/platformio/platformio-core/issues/4253>`_)

* **Integration**

  - Added a new build variable (``COMPILATIONDB_INCLUDE_TOOLCHAIN``) to include toolchain paths in the compilation database (`issue #3735 <https://github.com/platformio/platformio-core/issues/3735>`_); see the sketch after these notes
  - Changed the default path for the compilation database `compile_commands.json <https://docs.platformio.org/en/latest/integration/compile_commands.html>`__ to the project root
  - Enhanced integration for Qt Creator (`issue #3046 <https://github.com/platformio/platformio-core/issues/3046>`_)

* **Project Configuration**

  - Extended |INTERPOLATION| with the ``${this}`` pattern (`issue #3953 <https://github.com/platformio/platformio-core/issues/3953>`_)
  - Embed the environment name of the current section in the |PIOCONF| using the ``${this.__env__}`` pattern
  - Renamed the "src_build_flags" project configuration option to `build_src_flags <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#build-src-flags>`__
  - Renamed the "src_filter" project configuration option to `build_src_filter <https://docs.platformio.org/en/latest/projectconf/section_env_build.html#build-src-filter>`__

* **Miscellaneous**

  - Pass extra arguments to a `native <https://docs.platformio.org/en/latest/platforms/native.html>`__ program with a new `pio run --program-arg <https://docs.platformio.org/en/latest/core/userguide/cmd_run.html#cmdoption-pio-run-a>`__ option (`issue #4246 <https://github.com/platformio/platformio-core/issues/4246>`_)
  - Improved PIO Remote setup on credit-card-sized computers (Raspberry Pi, BeagleBone, etc.) (`issue #3865 <https://github.com/platformio/platformio-core/issues/3865>`_)
  - Removed the last traces of Python 2.7; Python 3.6 is now the minimum supported version
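
For reference, the ``COMPILATIONDB_INCLUDE_TOOLCHAIN`` build variable mentioned under *Integration* is typically set from an extra script; a minimal sketch is shown below, and the database itself is still generated with ``pio run -t compiledb``.

.. code-block:: python

    # extra_script.py -- include toolchain paths in compile_commands.json
    Import("env")

    # COMPILATIONDB_INCLUDE_TOOLCHAIN is the build variable referenced above
    env.Replace(COMPILATIONDB_INCLUDE_TOOLCHAIN=True)
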
.. _release_notes_5:

PlatformIO Core 5
-----------------

**A professional collaborative platform for embedded development**

5.0.2 (2020-10-30)
~~~~~~~~~~~~~~~~~~

- Initialize a new project or update an existing one, passing a working environment name and its options (`issue #3686 <https://github.com/platformio/platformio-core/issues/3686>`_)
- Automatically build PlatformIO Core extra Python dependencies on a host machine if they are missing in the registry (`issue #3700 <https://github.com/platformio/platformio-core/issues/3700>`_)
- Improved "core.call" RPC for PlatformIO Home (`issue #3671 <https://github.com/platformio/platformio-core/issues/3671>`_)
- Fixed a "PermissionError: [WinError 5]" on Windows when an external repository is used with the `lib_deps <https://docs.platformio.org/page/projectconf/section_env_library.html#lib-deps>`__ option (`issue #3664 <https://github.com/platformio/platformio-core/issues/3664>`_)
- Fixed a "KeyError: 'versions'" when a dependency does not exist in the registry (`issue #3666 <https://github.com/platformio/platformio-core/issues/3666>`_)
- Fixed an issue with the GCC linker when the "native" dev-platform is used in pair with library dependencies (`issue #3669 <https://github.com/platformio/platformio-core/issues/3669>`_)
- Fixed an "AssertionError: ensure_dir_exists" when checking library updates from simultaneous subprocesses (`issue #3677 <https://github.com/platformio/platformio-core/issues/3677>`_)
- Fixed an issue when the `pio package publish <https://docs.platformio.org/page/core/userguide/package/cmd_publish.html>`__ command removes the original archive after submitting it to the registry (`issue #3716 <https://github.com/platformio/platformio-core/issues/3716>`_)
- Fixed an issue when multiple `pio lib install <https://docs.platformio.org/page/core/userguide/lib/cmd_install.html>`__ commands with the same local library result in duplicates in ``lib_deps`` (`issue #3715 <https://github.com/platformio/platformio-core/issues/3715>`_)
- Fixed an issue with a "wrong" timestamp in device monitor output when using the `"time" filter <https://docs.platformio.org/page/core/userguide/device/cmd_monitor.html#filters>`__ (`issue #3712 <https://github.com/platformio/platformio-core/issues/3712>`_)

5.0.1 (2020-09-10)
~~~~~~~~~~~~~~~~~~

- Added support for the "owner" requirement when declaring ``dependencies`` using `library.json <https://docs.platformio.org/page/librarymanager/config.html#dependencies>`__
- Fixed an issue when using a custom git/ssh package with the `platform_packages <https://docs.platformio.org/page/projectconf/section_env_platform.html#platform-packages>`__ option (`issue #3624 <https://github.com/platformio/platformio-core/issues/3624>`_)
- Fixed an issue with "ImportError: cannot import name '_get_backend' from 'cryptography.hazmat.backends'" when using `Remote Development <https://docs.platformio.org/page/plus/pio-remote.html>`__ on a Raspberry Pi device (`issue #3652 <https://github.com/platformio/platformio-core/issues/3652>`_)
- Fixed an issue when the `pio package unpublish <https://docs.platformio.org/page/core/userguide/package/cmd_unpublish.html>`__ command crashes (`issue #3660 <https://github.com/platformio/platformio-core/issues/3660>`_)
- Fixed an issue when the package manager tries to install a built-in library from the registry (`issue #3662 <https://github.com/platformio/platformio-core/issues/3662>`_)
- Fixed an issue with an incorrect value for the C++ language standard in IDE projects when an in-progress language standard is used (`issue #3653 <https://github.com/platformio/platformio-core/issues/3653>`_)
- Fixed an issue with "Invalid simple block (semantic_version)" from a library dependency that refers to an external source (repository, ZIP/Tar archives) (`issue #3658 <https://github.com/platformio/platformio-core/issues/3658>`_)
- Fixed an issue when an external dev-platform could not be updated or removed using PlatformIO Home (`issue #3663 <https://github.com/platformio/platformio-core/issues/3663>`_)

5.0.0 (2020-09-03)
~~~~~~~~~~~~~~~~~~

Please check the `Migration guide from 4.x to 5.0 <https://docs.platformio.org/page/core/migration.html>`__.

* Integration with the new **PlatformIO Trusted Registry**

  - Enterprise-grade package storage with high availability (multiple replicas)
  - Secure, fast, and reliable global content delivery network (CDN)
  - Universal support for all packages:

    * Libraries
    * Development platforms
    * Toolchains

  - Built-in fine-grained access control (role-based, teams, organizations)
  - New CLI commands:

    * `pio package <https://docs.platformio.org/page/core/userguide/package/index.html>`__ – manage packages in the registry
    * `pio access <https://docs.platformio.org/page/core/userguide/access/index.html>`__ – manage package access for users, teams, and maintainers

* Integration with the new **Account Management System**

  - `Manage organizations <https://docs.platformio.org/page/core/userguide/org/index.html>`__
  - `Manage teams and team memberships <https://docs.platformio.org/page/core/userguide/team/index.html>`__

* New **Package Management System**

  - Integrated PlatformIO Core with the new PlatformIO Registry
  - Support for owner-based dependency declaration (resolves name conflicts) (`issue #1824 <https://github.com/platformio/platformio-core/issues/1824>`_)
  - Automatically save dependencies to `"platformio.ini" <https://docs.platformio.org/page/projectconf.html>`__ when installing using the PlatformIO CLI (`issue #2964 <https://github.com/platformio/platformio-core/issues/2964>`_)
  - Follow SemVer-compliant version constraints when checking library updates (`issue #1281 <https://github.com/platformio/platformio-core/issues/1281>`_)
  - Dropped support for the "packageRepositories" section in the "platform.json" manifest (please publish packages directly to the registry)

* **Build System**

  - Upgraded the build engine to `SCons 4.0 - a next-generation software construction tool <https://scons.org/>`__

    * `Configuration files are Python scripts <https://docs.platformio.org/page/projectconf/advanced_scripting.html>`__ – use the power of a real programming language to solve build problems
    * Built-in reliable and automatic dependency analysis
    * Improved support for parallel builds
    * Ability to `share built files in a cache <https://docs.platformio.org/page/projectconf/section_platformio.html#projectconf-pio-build-cache-dir>`__ to speed up multiple builds

  - New `Custom Targets <https://docs.platformio.org/page/projectconf/advanced_scripting.html#custom-targets>`__ (a usage sketch follows these release notes)

    * Pre/Post processing based on dependent sources (another target, source file, etc.)
    * Command launcher with its own arguments
    * Launch a command with custom options declared in `"platformio.ini" <https://docs.platformio.org/page/projectconf.html>`__
    * Python callback as a target (use the power of the Python interpreter and the PlatformIO Build API)
    * List available project targets (including dev-platform-specific and custom targets) with a new `pio run --list-targets <https://docs.platformio.org/page/core/userguide/cmd_run.html#cmdoption-platformio-run-list-targets>`__ command (`issue #3544 <https://github.com/platformio/platformio-core/issues/3544>`_)

  - Enable "cyclic reference" for the GCC linker only for embedded dev-platforms (`issue #3570 <https://github.com/platformio/platformio-core/issues/3570>`_)
  - Automatically enable the LDF dependency `chain+ mode (evaluates C/C++ Preprocessor conditional syntax) <https://docs.platformio.org/page/librarymanager/ldf.html#dependency-finder-mode>`__ for an Arduino library when its "library.properties" has a "depends" field (`issue #3607 <https://github.com/platformio/platformio-core/issues/3607>`_)
  - Fixed an issue with improper processing of source files added via multiple Build Middlewares (`issue #3531 <https://github.com/platformio/platformio-core/issues/3531>`_)
  - Fixed an issue with the ``clean`` target on Windows when the project and build directories are located on different logical drives (`issue #3542 <https://github.com/platformio/platformio-core/issues/3542>`_)

* **Project Management**

  - Added support for the "globstar/`**`" (recursive) pattern for the different commands and configuration options (`pio ci <https://docs.platformio.org/page/core/userguide/cmd_ci.html>`__, `src_filter <https://docs.platformio.org/page/projectconf/section_env_build.html#src-filter>`__, `check_patterns <https://docs.platformio.org/page/projectconf/section_env_check.html#check-patterns>`__, `library.json > srcFilter <https://docs.platformio.org/page/librarymanager/config.html#srcfilter>`__). Python 3.5+ is required
  - Added a new ``-e, --environment`` option to the `pio project init <https://docs.platformio.org/page/core/userguide/project/cmd_init.html#cmdoption-platformio-project-init-e>`__ command that helps to update a PlatformIO project using an existing environment
  - Dump build system data intended for IDE extensions/plugins using a new `pio project data <https://docs.platformio.org/page/core/userguide/project/cmd_data.html>`__ command
  - Do not generate ".travis.yml" for a new project; let the user have a choice

* **Unit Testing**

  - Updated PIO Unit Testing support for the Mbed framework and added compatibility with Mbed OS 6
  - Fixed an issue when running multiple test environments (`issue #3523 <https://github.com/platformio/platformio-core/issues/3523>`_)
  - Fixed an issue when the Unit Testing engine fails with a custom project configuration file (`issue #3583 <https://github.com/platformio/platformio-core/issues/3583>`_)

* **Static Code Analysis**

  - Updated analysis tools:

    * `Cppcheck <https://docs.platformio.org/page/plus/check-tools/cppcheck.html>`__ v2.1 with a new "soundy" analysis option and an improved code parser
    * `PVS-Studio <https://docs.platformio.org/page/plus/check-tools/pvs-studio.html>`__ v7.09 with a new file list analysis mode and an extended list of analysis diagnostics

  - Added a Cppcheck package for ARM-based single-board computers (`issue #3559 <https://github.com/platformio/platformio-core/issues/3559>`_)
  - Fixed an issue with PIO Check when a defect with a multiline error message is not reported in verbose mode (`issue #3631 <https://github.com/platformio/platformio-core/issues/3631>`_)

* **Miscellaneous**

  - Display system-wide information using a new `pio system info <https://docs.platformio.org/page/core/userguide/system/cmd_info.html>`__ command (`issue #3521 <https://github.com/platformio/platformio-core/issues/3521>`_)
  - Remove unused data using a new `pio system prune <https://docs.platformio.org/page/core/userguide/system/cmd_prune.html>`__ command (`issue #3522 <https://github.com/platformio/platformio-core/issues/3522>`_)
  - Show ignored project environments only in verbose mode (`issue #3641 <https://github.com/platformio/platformio-core/issues/3641>`_)
  - Do not escape compiler arguments in the VSCode template on Windows
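
To illustrate the *Custom Targets* feature introduced above, here is a minimal sketch; the target name, title, and commands are illustrative assumptions. Once declared in an extra script, it would be invoked with ``pio run -t envdump``.

.. code-block:: python

    # extra_script.py -- a minimal custom-target sketch (names are illustrative)
    Import("env")

    env.AddCustomTarget(
        name="envdump",
        dependencies=None,
        actions=[
            "pio --version",
            "python --version",
        ],
        title="Env Dump",
        description="Print PlatformIO Core and Python versions",
    )
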
See `PlatformIO Core 5.0 history <https://github.com/platformio/platformio-core/blob/v5.2.5/HISTORY.rst>`__.

.. _release_notes_4:

PlatformIO Core 4
-----------------

See `PlatformIO Core 4.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-4>`__.
See `PlatformIO Core 4.0 history <https://github.com/platformio/platformio-core/blob/v4.3.4/HISTORY.rst>`__.

PlatformIO Core 3
-----------------

See `PlatformIO Core 3.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-3>`__.
See `PlatformIO Core 3.0 history <https://github.com/platformio/platformio-core/blob/v3.6.7/HISTORY.rst>`__.

PlatformIO Core 2
-----------------

See `PlatformIO Core 2.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-2>`__.
See `PlatformIO Core 2.0 history <https://github.com/platformio/platformio-core/blob/v2.11.2/HISTORY.rst>`__.

PlatformIO Core 1
-----------------

See `PlatformIO Core 1.0 history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-1>`__.
See `PlatformIO Core 1.0 history <https://github.com/platformio/platformio-core/blob/v1.5.0/HISTORY.rst>`__.

PlatformIO Core Preview
-----------------------

See `PlatformIO Core Preview history <https://docs.platformio.org/en/v4.3.4/core/history.html#platformio-core-preview>`__.
See `PlatformIO Core Preview history <https://github.com/platformio/platformio-core/blob/v0.10.2/HISTORY.rst>`__.

@@ -1 +0,0 @@
include LICENSE

Makefile
@@ -1,17 +1,17 @@
lint:
	pylint -j 6 --rcfile=./.pylintrc ./platformio
	pylint -j 6 --rcfile=./.pylintrc ./tests
	pylint --rcfile=./.pylintrc ./tests
	pylint --rcfile=./.pylintrc ./platformio

isort:
	isort -rc ./platformio
	isort -rc ./tests
	isort ./platformio
	isort ./tests

format:
	black --target-version py27 ./platformio
	black --target-version py27 ./tests
	black ./platformio
	black ./tests

test:
	py.test --verbose --capture=no --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py
	py.test --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

before-commit: isort format lint

README.rst
@@ -1,27 +1,28 @@
PlatformIO
==========
PlatformIO Core
===============

.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
    :target: https://docs.platformio.org/page/core/index.html
    :target: https://docs.platformio.org/en/latest/core/index.html
    :alt: CI Build for PlatformIO Core
.. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg
    :target: https://github.com/platformio/platformio-examples
    :alt: CI Build for dev-platform examples
.. image:: https://github.com/platformio/platformio-core/workflows/Docs/badge.svg
    :target: https://docs.platformio.org?utm_source=github&utm_medium=core
    :alt: CI Build for Docs
.. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg
    :target: https://github.com/platformio/platformio-examples
    :alt: CI Build for dev-platform examples
.. image:: https://github.com/platformio/platformio-core/workflows/Projects/badge.svg
    :target: https://docs.platformio.org/en/latest/tutorials/index.html#projects
    :alt: CI Build for the Community Projects
.. image:: https://img.shields.io/pypi/v/platformio.svg
    :target: https://pypi.python.org/pypi/platformio/
    :alt: Latest Version
.. image:: https://img.shields.io/badge/license-Apache%202.0-blue.svg
    :target: https://pypi.python.org/pypi/platformio/
    :alt: License
.. image:: https://img.shields.io/badge/PlatformIO-Labs-orange.svg
    :alt: Community Labs
    :alt: PlatformIO Labs
    :target: https://piolabs.com/?utm_source=github&utm_medium=core

**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
**Quick Links:** `Homepage <https://platformio.org?utm_source=github&utm_medium=core>`_ |
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
`Registry <https://registry.platformio.org?utm_source=github&utm_medium=core>`_ |
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
@@ -35,101 +36,46 @@ PlatformIO
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
    :target: https://platformio.org?utm_source=github&utm_medium=core

`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ is a professional collaborative platform for embedded development
`PlatformIO <https://platformio.org>`_: Your Gateway to Embedded Software Development Excellence.

**A place where Developers and Teams have true Freedom! No more vendor lock-in!**
Unlock the true potential of embedded software development with
PlatformIO's collaborative ecosystem, embracing declarative principles,
test-driven methodologies, and modern toolchains for unrivaled success.

* Open source, maximum permissive Apache 2.0 license
* Cross-platform IDE and Unified Debugger
* Static Code Analyzer and Remote Unit Testing
* Multi-platform and Multi-architecture Build System
* Firmware File Explorer and Memory Inspection.
* Firmware File Explorer and Memory Inspection

Get Started
-----------

* `What is PlatformIO? <https://docs.platformio.org/page/what-is-platformio.html?utm_source=github&utm_medium=core>`_
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/page/core.html?utm_source=github&utm_medium=core>`_
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__

Solutions
---------

* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
* `Library Management <https://docs.platformio.org/en/latest/librarymanager/index.html?utm_source=github&utm_medium=core>`_
* `Desktop IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core>`_
* `Continuous Integration <https://docs.platformio.org/en/latest/ci/index.html?utm_source=github&utm_medium=core>`_

**Advanced**

* `Debugging <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `Unit Testing <https://docs.platformio.org/page/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
* `Static Code Analysis <https://docs.platformio.org/page/plus/pio-check.html?utm_source=github&utm_medium=core>`_
* `Remote Development <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
* `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html?utm_source=github&utm_medium=core>`_
* `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html?utm_source=github&utm_medium=core>`_
* `Static Code Analysis <https://docs.platformio.org/en/latest/plus/pio-check.html?utm_source=github&utm_medium=core>`_
* `Remote Development <https://docs.platformio.org/en/latest/plus/pio-remote.html?utm_source=github&utm_medium=core>`_

Registry
--------

* `Libraries <https://platformio.org/lib?utm_source=github&utm_medium=core>`_
* `Development Platforms <https://platformio.org/platforms?utm_source=github&utm_medium=core>`_
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_

Development Platforms
---------------------

* `Aceinna IMU <https://platformio.org/platforms/aceinna_imu?utm_source=github&utm_medium=core>`_
* `ASR Microelectronics ASR605x <https://platformio.org/platforms/asrmicro650x?utm_source=github&utm_medium=core>`_
* `Atmel AVR <https://platformio.org/platforms/atmelavr?utm_source=github&utm_medium=core>`_
* `Atmel SAM <https://platformio.org/platforms/atmelsam?utm_source=github&utm_medium=core>`_
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
* `Intel MCS-51 (8051) <https://platformio.org/platforms/intel_mcs51?utm_source=github&utm_medium=core>`_
* `Kendryte K210 <https://platformio.org/platforms/kendryte210?utm_source=github&utm_medium=core>`_
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
* `Nuclei <https://platformio.org/platforms/nuclei?utm_source=github&utm_medium=core>`_
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
* `RISC-V GAP <https://platformio.org/platforms/riscv_gap?utm_source=github&utm_medium=core>`_
* `Shakti <https://platformio.org/platforms/shakti?utm_source=github&utm_medium=core>`_
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
* `ST STM8 <https://platformio.org/platforms/ststm8?utm_source=github&utm_medium=core>`_
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
* `TI MSP430 <https://platformio.org/platforms/timsp430?utm_source=github&utm_medium=core>`_
* `TI Tiva <https://platformio.org/platforms/titiva?utm_source=github&utm_medium=core>`_
* `WIZNet W7500 <https://platformio.org/platforms/wiznet7500?utm_source=github&utm_medium=core>`_

Frameworks
----------

* `Arduino <https://platformio.org/frameworks/arduino?utm_source=github&utm_medium=core>`_
* `CMSIS <https://platformio.org/frameworks/cmsis?utm_source=github&utm_medium=core>`_
* `ESP-IDF <https://platformio.org/frameworks/espidf?utm_source=github&utm_medium=core>`_
* `ESP8266 Non-OS SDK <https://platformio.org/frameworks/esp8266-nonos-sdk?utm_source=github&utm_medium=core>`_
* `ESP8266 RTOS SDK <https://platformio.org/frameworks/esp8266-rtos-sdk?utm_source=github&utm_medium=core>`_
* `Freedom E SDK <https://platformio.org/frameworks/freedom-e-sdk?utm_source=github&utm_medium=core>`_
* `GigaDevice GD32V SDK <https://platformio.org/frameworks/gd32vf103-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte Standalone SDK <https://platformio.org/frameworks/kendryte-standalone-sdk?utm_source=github&utm_medium=core>`_
* `Kendryte FreeRTOS SDK <https://platformio.org/frameworks/kendryte-freertos-sdk?utm_source=github&utm_medium=core>`_
* `libOpenCM3 <https://platformio.org/frameworks/libopencm3?utm_source=github&utm_medium=core>`_
* `Mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
* `Nuclei SDK <https://platformio.org/frameworks/nuclei-sdk?utm_source=github&utm_medium=core>`_
* `PULP OS <https://platformio.org/frameworks/pulp-os?utm_source=github&utm_medium=core>`_
* `Pumbaa <https://platformio.org/frameworks/pumbaa?utm_source=github&utm_medium=core>`_
* `Shakti SDK <https://platformio.org/frameworks/shakti-sdk?utm_source=github&utm_medium=core>`_
* `Simba <https://platformio.org/frameworks/simba?utm_source=github&utm_medium=core>`_
* `SPL <https://platformio.org/frameworks/spl?utm_source=github&utm_medium=core>`_
* `STM32Cube <https://platformio.org/frameworks/stm32cube?utm_source=github&utm_medium=core>`_
* `WiringPi <https://platformio.org/frameworks/wiringpi?utm_source=github&utm_medium=core>`_
* `Zephyr <https://platformio.org/frameworks/zephyr?utm_source=github&utm_medium=core>`_
* `Libraries <https://registry.platformio.org/search?t=library&utm_source=github&utm_medium=core>`_
* `Development Platforms <https://registry.platformio.org/search?t=platform&utm_source=github&utm_medium=core>`_
* `Development Tools <https://registry.platformio.org/search?t=tool&utm_source=github&utm_medium=core>`_

Contributing
------------
@@ -142,7 +88,7 @@ Telemetry / Privacy Policy
Share minimal diagnostics and usage information to help us make PlatformIO better.
It is enabled by default. For more information see:

* `Telemetry Setting <https://docs.platformio.org/page/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_

License
-------
@@ -151,3 +97,7 @@ Copyright (c) 2014-present PlatformIO <contact@platformio.org>

The PlatformIO is licensed under the permissive Apache 2.0 license,
so you can use it in both commercial and personal projects with confidence.

.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
    :target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
    :alt: SWUbanner

SECURITY.md (new file)
@@ -0,0 +1,34 @@
# Security Policy

## Supported Versions

We are committed to ensuring the security and protection of PlatformIO Core.
To this end, we support only the following versions:

| Version | Supported          |
| ------- | ------------------ |
| 6.1.x   | :white_check_mark: |
| < 6.1   | :x:                |

Unsupported versions of the PlatformIO Core may have known vulnerabilities or security issues that could compromise the security of our organization's systems and data.
Therefore, it is important that all developers use only supported versions of the PlatformIO Core.

## Reporting a Vulnerability

We take the security of our systems and data very seriously. We encourage responsible disclosure of any vulnerabilities or security issues that you may find in our systems or applications. If you believe you have discovered a vulnerability, please report it to us immediately.

To report a vulnerability, please send an email to our security team at contact@piolabs.com. Please include as much information as possible, including:

- A description of the vulnerability and how it can be exploited
- Steps to reproduce the vulnerability
- Any additional information that can help us understand and reproduce the vulnerability

Once we receive your report, our security team will acknowledge receipt within 24 hours and will work to validate the reported vulnerability. We will provide periodic updates on the progress of the vulnerability assessment, and will notify you once a fix has been deployed.

If the vulnerability is accepted, we will work to remediate the issue as quickly as possible. We may also provide credit or recognition to the individual who reported the vulnerability, at our discretion.

If the vulnerability is declined, we will provide a justification for our decision and may offer guidance on how to improve the report or how to test the system more effectively.

Please note that we will not take any legal action against individuals who report vulnerabilities in good faith and in accordance with this policy.

Thank you for helping us keep our systems and data secure.

Submodule docs updated: deae09a880...670721e923
Submodule examples updated: 84855946ea...4bed26fd0d

@@ -12,53 +12,34 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

VERSION = (5, 0, 2)
VERSION = (6, 1, "15a1")
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
__description__ = (
    "A professional collaborative platform for embedded development. "
    "Cross-platform IDE and Unified Debugger. "
    "Static Code Analyzer and Remote Unit Testing. "
    "Multi-platform and Multi-architecture Build System. "
    "Firmware File Explorer and Memory Inspection. "
    "IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
    "STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
    "MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
    "STMicroelectronics (STM8/STM32), Teensy"
    "Your Gateway to Embedded Software Development Excellence. "
    "Unlock the true potential of embedded software development "
    "with PlatformIO's collaborative ecosystem, embracing "
    "declarative principles, test-driven methodologies, and "
    "modern toolchains for unrivaled success."
)
__url__ = "https://platformio.org"

__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "contact@piolabs.com"

__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__copyright__ = "Copyright 2014-present PlatformIO Labs"

__accounts_api__ = "https://api.accounts.platformio.org"
__registry_api__ = [
    "https://api.registry.platformio.org",
    "https://api.registry.ns1.platformio.org",
__registry_mirror_hosts__ = [
    "registry.platformio.org",
    "registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"

__default_requests_timeout__ = (10, None)  # (connect, read)

__core_packages__ = {
    "contrib-piohome": "~3.3.1",
    "contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
    "tool-unity": "~1.20500.0",
    "tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0",
    "tool-cppcheck": "~1.210.0",
    "tool-clangtidy": "~1.100000.0",
    "tool-pvs-studio": "~7.9.0",
}

__check_internet_hosts__ = [
    "185.199.110.153",  # Github.com
    "88.198.170.159",  # platformio.org
    "github.com",
    "platformio.org",
]
] + __registry_mirror_hosts__

@@ -14,31 +14,24 @@

import os
import sys
from traceback import format_exc
import traceback

import click

from platformio import __version__, exception, maintenance, util
from platformio.commands import PlatformioCLI
from platformio.compat import CYGWIN

try:
    import click_completion  # pylint: disable=import-error

    click_completion.init()
except:  # pylint: disable=bare-except
    pass
from platformio import __version__, exception, maintenance
from platformio.cli import PlatformioCLI
from platformio.compat import IS_CYGWIN, ensure_python3


@click.command(
    cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
)
@click.version_option(__version__, prog_name="PlatformIO")
@click.option("--force", "-f", is_flag=True, help="DEPRECATE")
@click.version_option(__version__, prog_name="PlatformIO Core")
@click.option("--force", "-f", is_flag=True, help="DEPRECATED", hidden=True)
@click.option("--caller", "-c", help="Caller ID (service)")
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
@click.pass_context
def cli(ctx, force, caller, no_ansi):
def cli(ctx, force, caller, no_ansi):  # pylint: disable=unused-argument
    try:
        if (
            no_ansi
@@ -60,29 +53,19 @@ def cli(ctx, force, caller, no_ansi):
        except:  # pylint: disable=bare-except
            pass

    maintenance.on_platformio_start(ctx, force, caller)
    maintenance.on_cmd_start(ctx, caller)


@cli.resultcallback()
@cli.result_callback()
@click.pass_context
def process_result(ctx, result, *_, **__):
    maintenance.on_platformio_end(ctx, result)
def process_result(*_, **__):
    maintenance.on_cmd_end()


@util.memoized()
def configure():
    if CYGWIN:
    if IS_CYGWIN:
        raise exception.CygwinEnvDetected()

    # https://urllib3.readthedocs.org
    # /en/latest/security.html#insecureplatformwarning
    try:
        import urllib3  # pylint: disable=import-outside-toplevel

        urllib3.disable_warnings()
    except (AttributeError, ImportError):
        pass

    # Handle IOError issue with VSCode's Terminal (Windows)
    click_echo_origin = [click.echo, click.secho]

@@ -104,30 +87,32 @@ def main(argv=None):
    if argv:
        assert isinstance(argv, list)
        sys.argv = argv

    try:
        ensure_python3(raise_exception=True)
        configure()
        cli()  # pylint: disable=no-value-for-parameter
    except SystemExit as e:
        if e.code and str(e.code).isdigit():
            exit_code = int(e.code)
    except Exception as e:  # pylint: disable=broad-except
        if not isinstance(e, exception.ReturnErrorCode):
            maintenance.on_platformio_exception(e)
            error_str = "Error: "
            if isinstance(e, exception.PlatformioException):
                error_str += str(e)
    except SystemExit as exc:
        if exc.code and str(exc.code).isdigit():
            exit_code = int(exc.code)
    except Exception as exc:  # pylint: disable=broad-except
        if not isinstance(exc, exception.ReturnErrorCode):
            maintenance.on_platformio_exception(exc)
            error_str = f"{exc.__class__.__name__}: "
            if isinstance(exc, exception.PlatformioException):
                error_str += str(exc)
            else:
                error_str += format_exc()
                error_str += traceback.format_exc()
                error_str += """
============================================================

An unexpected error occurred. Further steps:

* Verify that you have the latest version of PlatformIO using
  `pip install -U platformio` command
  `python -m pip install -U platformio` command

* Try to find answer in FAQ Troubleshooting section
  https://docs.platformio.org/page/faq.html
  https://docs.platformio.org/page/faq/index.html

* Report this problem to the developers
  https://github.com/platformio/platformio-core/issues
@@ -135,7 +120,9 @@ An unexpected error occurred. Further steps:
============================================================
"""
        click.secho(error_str, fg="red", err=True)
        exit_code = int(str(e)) if str(e).isdigit() else 1
        exit_code = int(str(exc)) if str(exc).isdigit() else 1

    maintenance.on_platformio_exit()
    sys.argv = prev_sys_argv
    return exit_code

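main(argv) is also usable as a library entry point: it swaps sys.argv for the given list, restores it afterwards, and returns an integer exit code. A minimal sketch; the argv values are placeholders for illustration only:

    # Sketch: driving the CLI programmatically through main(argv).
    import sys

    from platformio.__main__ import main

    if __name__ == "__main__":
        # argv[0] is a program-name placeholder; the remaining items are CLI args.
        sys.exit(main(["platformio", "--version"]))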
platformio/account/cli.py (new file, 44 lines)
@@ -0,0 +1,44 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.commands.destroy import account_destroy_cmd
from platformio.account.commands.forgot import account_forgot_cmd
from platformio.account.commands.login import account_login_cmd
from platformio.account.commands.logout import account_logout_cmd
from platformio.account.commands.password import account_password_cmd
from platformio.account.commands.register import account_register_cmd
from platformio.account.commands.show import account_show_cmd
from platformio.account.commands.token import account_token_cmd
from platformio.account.commands.update import account_update_cmd


@click.group(
    "account",
    commands=[
        account_destroy_cmd,
        account_forgot_cmd,
        account_login_cmd,
        account_logout_cmd,
        account_password_cmd,
        account_register_cmd,
        account_show_cmd,
        account_token_cmd,
        account_update_cmd,
    ],
    short_help="Manage PlatformIO account",
)
def cli():
    pass
@@ -16,38 +16,34 @@ import os
import time

from platformio import __accounts_api__, app
from platformio.clients.http import HTTPClient
from platformio.exception import PlatformioException
from platformio.exception import PlatformioException, UserSideException
from platformio.http import HttpApiClient, HttpClientApiError


class AccountError(PlatformioException):

    MESSAGE = "{0}"


class AccountNotAuthorized(AccountError):

class AccountNotAuthorized(AccountError, UserSideException):
    MESSAGE = "You are not authorized! Please log in to PlatformIO Account."


class AccountAlreadyAuthorized(AccountError):

class AccountAlreadyAuthorized(AccountError, UserSideException):
    MESSAGE = "You are already authorized with {0} account."


class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods

class AccountClient(HttpApiClient):  # pylint:disable=too-many-public-methods
    SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7

    def __init__(self):
        super(AccountClient, self).__init__(__accounts_api__)
        super().__init__(__accounts_api__)

    @staticmethod
    def get_refresh_token():
        try:
            return app.get_state_item("account").get("auth").get("refresh_token")
        except:  # pylint:disable=bare-except
            raise AccountNotAuthorized()
        except Exception as exc:
            raise AccountNotAuthorized() from exc

    @staticmethod
    def delete_local_session():
@@ -61,13 +57,33 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
        del account[key]
        app.set_state_item("account", account)

    def send_auth_request(self, *args, **kwargs):
        headers = kwargs.get("headers", {})
        if "Authorization" not in headers:
            token = self.fetch_authentication_token()
            headers["Authorization"] = "Bearer %s" % token
        kwargs["headers"] = headers
        return self.fetch_json_data(*args, **kwargs)
    def fetch_json_data(self, *args, **kwargs):
        try:
            return super().fetch_json_data(*args, **kwargs)
        except HttpClientApiError as exc:
            raise AccountError(exc) from exc

    def fetch_authentication_token(self):
        if os.environ.get("PLATFORMIO_AUTH_TOKEN"):
            return os.environ.get("PLATFORMIO_AUTH_TOKEN")
        auth = app.get_state_item("account", {}).get("auth", {})
        if auth.get("access_token") and auth.get("access_token_expire"):
            if auth.get("access_token_expire") > time.time():
                return auth.get("access_token")
            if auth.get("refresh_token"):
                try:
                    data = self.fetch_json_data(
                        "post",
                        "/v1/login",
                        headers={
                            "Authorization": "Bearer %s" % auth.get("refresh_token")
                        },
                    )
                    app.set_state_item("account", data)
                    return data.get("auth").get("access_token")
                except AccountError:
                    self.delete_local_session()
                    raise AccountNotAuthorized()

    def login(self, username, password):
        try:
@@ -119,10 +135,11 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
        return True

    def change_password(self, old_password, new_password):
        return self.send_auth_request(
        return self.fetch_json_data(
            "post",
            "/v1/password",
            data={"old_password": old_password, "new_password": new_password},
            x_with_authorization=True,
        )

    def registration(
@@ -150,10 +167,11 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
        )

    def auth_token(self, password, regenerate):
        return self.send_auth_request(
        return self.fetch_json_data(
            "post",
            "/v1/token",
            data={"password": password, "regenerate": 1 if regenerate else 0},
            x_with_authorization=True,
        ).get("auth_token")

    def forgot_password(self, username):
@@ -164,18 +182,20 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
        )

    def get_profile(self):
        return self.send_auth_request(
        return self.fetch_json_data(
            "get",
            "/v1/profile",
            x_with_authorization=True,
        )

    def update_profile(self, profile, current_password):
        profile["current_password"] = current_password
        self.delete_local_state("summary")
        response = self.send_auth_request(
        response = self.fetch_json_data(
            "put",
            "/v1/profile",
            data=profile,
            x_with_authorization=True,
        )
        return response

@@ -193,9 +213,10 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
                    "username": account.get("username"),
                }
            }
        result = self.send_auth_request(
        result = self.fetch_json_data(
            "get",
            "/v1/summary",
            x_with_authorization=True,
        )
        account["summary"] = dict(
            profile=result.get("profile"),
@@ -207,120 +228,125 @@ class AccountClient(HTTPClient):  # pylint:disable=too-many-public-methods
        app.set_state_item("account", account)
        return result

    def get_logged_username(self):
        return self.get_account_info(offline=True).get("profile").get("username")

    def destroy_account(self):
        return self.send_auth_request("delete", "/v1/account")
        return self.fetch_json_data(
            "delete",
            "/v1/account",
            x_with_authorization=True,
        )

    def create_org(self, orgname, email, displayname):
        return self.send_auth_request(
        return self.fetch_json_data(
            "post",
            "/v1/orgs",
            data={"orgname": orgname, "email": email, "displayname": displayname},
            x_with_authorization=True,
        )

    def get_org(self, orgname):
        return self.send_auth_request("get", "/v1/orgs/%s" % orgname)
        return self.fetch_json_data(
            "get",
            "/v1/orgs/%s" % orgname,
            x_with_authorization=True,
        )

    def list_orgs(self):
        return self.send_auth_request(
        return self.fetch_json_data(
            "get",
            "/v1/orgs",
            x_with_authorization=True,
        )

    def update_org(self, orgname, data):
        return self.send_auth_request(
            "put", "/v1/orgs/%s" % orgname, data={k: v for k, v in data.items() if v}
        return self.fetch_json_data(
            "put",
            "/v1/orgs/%s" % orgname,
            data={k: v for k, v in data.items() if v},
            x_with_authorization=True,
        )

    def destroy_org(self, orgname):
        return self.send_auth_request(
        return self.fetch_json_data(
            "delete",
            "/v1/orgs/%s" % orgname,
            x_with_authorization=True,
        )

    def add_org_owner(self, orgname, username):
        return self.send_auth_request(
        return self.fetch_json_data(
            "post",
            "/v1/orgs/%s/owners" % orgname,
            data={"username": username},
            x_with_authorization=True,
        )

    def list_org_owners(self, orgname):
        return self.send_auth_request(
        return self.fetch_json_data(
            "get",
            "/v1/orgs/%s/owners" % orgname,
            x_with_authorization=True,
        )

    def remove_org_owner(self, orgname, username):
        return self.send_auth_request(
        return self.fetch_json_data(
            "delete",
            "/v1/orgs/%s/owners" % orgname,
            data={"username": username},
            params={"username": username},
            x_with_authorization=True,
        )

    def create_team(self, orgname, teamname, description):
        return self.send_auth_request(
        return self.fetch_json_data(
            "post",
            "/v1/orgs/%s/teams" % orgname,
            data={"name": teamname, "description": description},
            x_with_authorization=True,
        )

    def destroy_team(self, orgname, teamname):
        return self.send_auth_request(
        return self.fetch_json_data(
            "delete",
            "/v1/orgs/%s/teams/%s" % (orgname, teamname),
            x_with_authorization=True,
        )

    def get_team(self, orgname, teamname):
        return self.send_auth_request(
        return self.fetch_json_data(
            "get",
            "/v1/orgs/%s/teams/%s" % (orgname, teamname),
            x_with_authorization=True,
        )

    def list_teams(self, orgname):
        return self.send_auth_request(
        return self.fetch_json_data(
            "get",
            "/v1/orgs/%s/teams" % orgname,
            x_with_authorization=True,
        )

    def update_team(self, orgname, teamname, data):
        return self.send_auth_request(
        return self.fetch_json_data(
            "put",
            "/v1/orgs/%s/teams/%s" % (orgname, teamname),
            data={k: v for k, v in data.items() if v},
            x_with_authorization=True,
        )

    def add_team_member(self, orgname, teamname, username):
        return self.send_auth_request(
        return self.fetch_json_data(
            "post",
            "/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
            data={"username": username},
            x_with_authorization=True,
        )

    def remove_team_member(self, orgname, teamname, username):
        return self.send_auth_request(
        return self.fetch_json_data(
            "delete",
            "/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
            data={"username": username},
            params={"username": username},
            x_with_authorization=True,
        )

    def fetch_authentication_token(self):
        if os.environ.get("PLATFORMIO_AUTH_TOKEN"):
            return os.environ.get("PLATFORMIO_AUTH_TOKEN")
        auth = app.get_state_item("account", {}).get("auth", {})
        if auth.get("access_token") and auth.get("access_token_expire"):
            if auth.get("access_token_expire") > time.time():
                return auth.get("access_token")
            if auth.get("refresh_token"):
                try:
                    data = self.fetch_json_data(
                        "post",
                        "/v1/login",
                        headers={
                            "Authorization": "Bearer %s" % auth.get("refresh_token")
                        },
                    )
                    app.set_state_item("account", data)
                    return data.get("auth").get("access_token")
                except AccountError:
                    self.delete_local_session()
                    raise AccountNotAuthorized()
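fetch_authentication_token() resolves credentials in a fixed order: the PLATFORMIO_AUTH_TOKEN environment variable, then a cached non-expired access token, then a refresh-token exchange against /v1/login. A hedged usage sketch, assuming either the environment variable is set or a `pio account login` session is cached locally:

    # Sketch: obtaining a bearer token the same way AccountClient does internally.
    from platformio.account.client import AccountClient, AccountNotAuthorized

    try:
        with AccountClient() as client:
            token = client.fetch_authentication_token()
            print("have token:", bool(token))  # None when no session/env token exists
    except AccountNotAuthorized:
        # Raised when a stale refresh token is rejected and the local session is dropped.
        print("Not logged in and PLATFORMIO_AUTH_TOKEN is not set.")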
platformio/account/commands/destroy.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient, AccountNotAuthorized


@click.command("destroy", short_help="Destroy account")
def account_destroy_cmd():
    with AccountClient() as client:
        click.confirm(
            "Are you sure you want to delete the %s user account?\n"
            "Warning! All linked data will be permanently removed and can not be restored."
            % client.get_logged_username(),
            abort=True,
        )
        client.destroy_account()
        try:
            client.logout()
        except AccountNotAuthorized:
            pass
        click.secho(
            "User account has been destroyed.",
            fg="green",
        )
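Because the command prompts through click.confirm(abort=True), it can be exercised with click's test runner without deleting anything, provided a logged-in session is cached (get_logged_username() is called to build the prompt). A test-style sketch, not part of the diff:

    # Sketch: answering "n" makes click.confirm() abort before destroy_account() runs.
    from click.testing import CliRunner

    from platformio.account.commands.destroy import account_destroy_cmd

    result = CliRunner().invoke(account_destroy_cmd, input="n\n")
    print(result.exit_code)  # 1 (aborted); the account is left untouched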
@@ -12,16 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=unused-argument
import click

from platformio.commands.update import cli as cmd_update
from platformio.account.client import AccountClient


def test_update(clirunner, validate_cliresult, isolated_pio_core):
    matches = ("Platform Manager", "Library Manager")
    result = clirunner.invoke(cmd_update, ["--only-check"])
    validate_cliresult(result)
    assert all([m in result.output for m in matches])
    result = clirunner.invoke(cmd_update)
    validate_cliresult(result)
    assert all([m in result.output for m in matches])
@click.command("forgot", short_help="Forgot password")
@click.option("--username", prompt="Username or email")
def account_forgot_cmd(username):
    with AccountClient() as client:
        client.forgot_password(username)
    click.secho(
        "If this account is registered, we will send the "
        "further instructions to your email.",
        fg="green",
    )
platformio/account/commands/login.py (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("login", short_help="Log in to PlatformIO Account")
|
||||
@click.option("-u", "--username", prompt="Username or email")
|
||||
@click.option("-p", "--password", prompt=True, hide_input=True)
|
||||
def account_login_cmd(username, password):
|
||||
with AccountClient() as client:
|
||||
client.login(username, password)
|
||||
click.secho("Successfully logged in!", fg="green")
|
platformio/account/commands/logout.py (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("logout", short_help="Log out of PlatformIO Account")
|
||||
def account_logout_cmd():
|
||||
with AccountClient() as client:
|
||||
client.logout()
|
||||
click.secho("Successfully logged out!", fg="green")
|
platformio/account/commands/password.py (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("password", short_help="Change password")
|
||||
@click.option("--old-password", prompt=True, hide_input=True)
|
||||
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
|
||||
def account_password_cmd(old_password, new_password):
|
||||
with AccountClient() as client:
|
||||
client.change_password(old_password, new_password)
|
||||
click.secho("Password successfully changed!", fg="green")
|
platformio/account/commands/register.py (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
from platformio.account.validate import (
|
||||
validate_email,
|
||||
validate_password,
|
||||
validate_username,
|
||||
)
|
||||
|
||||
|
||||
@click.command("register", short_help="Create new PlatformIO Account")
|
||||
@click.option(
|
||||
"-u",
|
||||
"--username",
|
||||
prompt=True,
|
||||
callback=lambda _, __, value: validate_username(value),
|
||||
)
|
||||
@click.option(
|
||||
"-e", "--email", prompt=True, callback=lambda _, __, value: validate_email(value)
|
||||
)
|
||||
@click.option(
|
||||
"-p",
|
||||
"--password",
|
||||
prompt=True,
|
||||
hide_input=True,
|
||||
confirmation_prompt=True,
|
||||
callback=lambda _, __, value: validate_password(value),
|
||||
)
|
||||
@click.option("--firstname", prompt=True)
|
||||
@click.option("--lastname", prompt=True)
|
||||
def account_register_cmd(username, email, password, firstname, lastname):
|
||||
with AccountClient() as client:
|
||||
client.registration(username, email, password, firstname, lastname)
|
||||
click.secho(
|
||||
"An account has been successfully created. "
|
||||
"Please check your mail to activate your account and verify your email address.",
|
||||
fg="green",
|
||||
)
|
platformio/account/commands/show.py (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import util
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("show", short_help="PlatformIO Account information")
|
||||
@click.option("--offline", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_show_cmd(offline, json_output):
|
||||
with AccountClient() as client:
|
||||
info = client.get_account_info(offline)
|
||||
if json_output:
|
||||
click.echo(json.dumps(info))
|
||||
return
|
||||
click.echo()
|
||||
if info.get("profile"):
|
||||
print_profile(info["profile"])
|
||||
if info.get("packages"):
|
||||
print_packages(info["packages"])
|
||||
if info.get("subscriptions"):
|
||||
print_subscriptions(info["subscriptions"])
|
||||
click.echo()
|
||||
|
||||
|
||||
def print_profile(profile):
|
||||
click.secho("Profile", fg="cyan", bold=True)
|
||||
click.echo("=" * len("Profile"))
|
||||
data = []
|
||||
if profile.get("username"):
|
||||
data.append(("Username:", profile["username"]))
|
||||
if profile.get("email"):
|
||||
data.append(("Email:", profile["email"]))
|
||||
if profile.get("firstname"):
|
||||
data.append(("First name:", profile["firstname"]))
|
||||
if profile.get("lastname"):
|
||||
data.append(("Last name:", profile["lastname"]))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
|
||||
def print_packages(packages):
|
||||
click.echo()
|
||||
click.secho("Packages", fg="cyan")
|
||||
click.echo("=" * len("Packages"))
|
||||
for package in packages:
|
||||
click.echo()
|
||||
click.secho(package.get("name"), bold=True)
|
||||
click.echo("-" * len(package.get("name")))
|
||||
if package.get("description"):
|
||||
click.echo(package.get("description"))
|
||||
data = []
|
||||
expire = "-"
|
||||
if "subscription" in package:
|
||||
expire = util.parse_datetime(
|
||||
package["subscription"].get("end_at")
|
||||
or package["subscription"].get("next_bill_at")
|
||||
).strftime("%Y-%m-%d")
|
||||
data.append(("Expire:", expire))
|
||||
services = []
|
||||
for key in package:
|
||||
if not key.startswith("service."):
|
||||
continue
|
||||
if isinstance(package[key], dict):
|
||||
services.append(package[key].get("title"))
|
||||
else:
|
||||
services.append(package[key])
|
||||
if services:
|
||||
data.append(("Services:", ", ".join(services)))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
|
||||
def print_subscriptions(subscriptions):
|
||||
click.echo()
|
||||
click.secho("Subscriptions", fg="cyan")
|
||||
click.echo("=" * len("Subscriptions"))
|
||||
for subscription in subscriptions:
|
||||
click.echo()
|
||||
click.secho(subscription.get("product_name"), bold=True)
|
||||
click.echo("-" * len(subscription.get("product_name")))
|
||||
data = [("State:", subscription.get("status"))]
|
||||
begin_at = util.parse_datetime(subscription.get("begin_at")).strftime("%c")
|
||||
data.append(("Start date:", begin_at or "-"))
|
||||
end_at = subscription.get("end_at")
|
||||
if end_at:
|
||||
end_at = util.parse_datetime(subscription.get("end_at")).strftime("%c")
|
||||
data.append(("End date:", end_at or "-"))
|
||||
next_bill_at = subscription.get("next_bill_at")
|
||||
if next_bill_at:
|
||||
next_bill_at = util.parse_datetime(
|
||||
subscription.get("next_bill_at")
|
||||
).strftime("%c")
|
||||
data.append(("Next payment:", next_bill_at or "-"))
|
||||
data.append(
|
||||
("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-")
|
||||
)
|
||||
data.append(
|
||||
("Cancel:", click.style(subscription.get("cancel_url"), fg="blue") or "-")
|
||||
)
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
platformio/account/commands/token.py (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("token", short_help="Get or regenerate Authentication Token")
|
||||
@click.option("-p", "--password", prompt=True, hide_input=True)
|
||||
@click.option("--regenerate", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_token_cmd(password, regenerate, json_output):
|
||||
with AccountClient() as client:
|
||||
auth_token = client.auth_token(password, regenerate)
|
||||
if json_output:
|
||||
click.echo(json.dumps({"status": "success", "result": auth_token}))
|
||||
return
|
||||
click.secho("Personal Authentication Token: %s" % auth_token, fg="green")
|
platformio/account/commands/update.py (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient, AccountNotAuthorized
|
||||
from platformio.account.validate import validate_email, validate_username
|
||||
|
||||
|
||||
@click.command("update", short_help="Update profile information")
|
||||
@click.option("--current-password", prompt=True, hide_input=True)
|
||||
@click.option("--username")
|
||||
@click.option("--email")
|
||||
@click.option("--firstname")
|
||||
@click.option("--lastname")
|
||||
def account_update_cmd(current_password, **kwargs):
|
||||
with AccountClient() as client:
|
||||
profile = client.get_profile()
|
||||
new_profile = profile.copy()
|
||||
if not any(kwargs.values()):
|
||||
for field in profile:
|
||||
new_profile[field] = click.prompt(
|
||||
field.replace("_", " ").capitalize(), default=profile[field]
|
||||
)
|
||||
if field == "email":
|
||||
validate_email(new_profile[field])
|
||||
if field == "username":
|
||||
validate_username(new_profile[field])
|
||||
else:
|
||||
new_profile.update({key: value for key, value in kwargs.items() if value})
|
||||
client.update_profile(new_profile, current_password)
|
||||
click.secho("Profile successfully updated!", fg="green")
|
||||
username_changed = new_profile["username"] != profile["username"]
|
||||
email_changed = new_profile["email"] != profile["email"]
|
||||
if not username_changed and not email_changed:
|
||||
return None
|
||||
try:
|
||||
client.logout()
|
||||
except AccountNotAuthorized:
|
||||
pass
|
||||
if email_changed:
|
||||
click.secho(
|
||||
"Please check your mail to verify your new email address and re-login. ",
|
||||
fg="yellow",
|
||||
)
|
||||
return None
|
||||
click.secho("Please re-login.", fg="yellow")
|
||||
return None
|
platformio/account/org/cli.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.org.commands.add import org_add_cmd
|
||||
from platformio.account.org.commands.create import org_create_cmd
|
||||
from platformio.account.org.commands.destroy import org_destroy_cmd
|
||||
from platformio.account.org.commands.list import org_list_cmd
|
||||
from platformio.account.org.commands.remove import org_remove_cmd
|
||||
from platformio.account.org.commands.update import org_update_cmd
|
||||
|
||||
|
||||
@click.group(
|
||||
"account",
|
||||
commands=[
|
||||
org_add_cmd,
|
||||
org_create_cmd,
|
||||
org_destroy_cmd,
|
||||
org_list_cmd,
|
||||
org_remove_cmd,
|
||||
org_update_cmd,
|
||||
],
|
||||
short_help="Manage organizations",
|
||||
)
|
||||
def cli():
|
||||
pass
|
platformio/account/org/commands/add.py (new file, 34 lines)
@@ -0,0 +1,34 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("add", short_help="Add a new owner to organization")
|
||||
@click.argument(
|
||||
"orgname",
|
||||
)
|
||||
@click.argument(
|
||||
"username",
|
||||
)
|
||||
def org_add_cmd(orgname, username):
|
||||
with AccountClient() as client:
|
||||
client.add_org_owner(orgname, username)
|
||||
return click.secho(
|
||||
"The new owner `%s` has been successfully added to the `%s` organization."
|
||||
% (username, orgname),
|
||||
fg="green",
|
||||
)
|
platformio/account/org/commands/create.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
from platformio.account.validate import validate_email, validate_orgname
|
||||
|
||||
|
||||
@click.command("create", short_help="Create a new organization")
|
||||
@click.argument(
|
||||
"orgname",
|
||||
callback=lambda _, __, value: validate_orgname(value),
|
||||
)
|
||||
@click.option(
|
||||
"--email", callback=lambda _, __, value: validate_email(value) if value else value
|
||||
)
|
||||
@click.option(
|
||||
"--displayname",
|
||||
)
|
||||
def org_create_cmd(orgname, email, displayname):
|
||||
with AccountClient() as client:
|
||||
client.create_org(orgname, email, displayname)
|
||||
return click.secho(
|
||||
"The organization `%s` has been successfully created." % orgname,
|
||||
fg="green",
|
||||
)
|
platformio/account/org/commands/destroy.py (new file, 34 lines)
@@ -0,0 +1,34 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("destroy", short_help="Destroy organization")
|
||||
@click.argument("orgname")
|
||||
def org_destroy_cmd(orgname):
|
||||
with AccountClient() as client:
|
||||
click.confirm(
|
||||
"Are you sure you want to delete the `%s` organization account?\n"
|
||||
"Warning! All linked data will be permanently removed and can not be restored."
|
||||
% orgname,
|
||||
abort=True,
|
||||
)
|
||||
client.destroy_org(orgname)
|
||||
return click.secho(
|
||||
"Organization `%s` has been destroyed." % orgname,
|
||||
fg="green",
|
||||
)
|
platformio/account/org/commands/list.py (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("list", short_help="List organizations and their members")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def org_list_cmd(json_output):
|
||||
with AccountClient() as client:
|
||||
orgs = client.list_orgs()
|
||||
if json_output:
|
||||
return click.echo(json.dumps(orgs))
|
||||
if not orgs:
|
||||
return click.echo("You do not have any organization")
|
||||
for org in orgs:
|
||||
click.echo()
|
||||
click.secho(org.get("orgname"), fg="cyan")
|
||||
click.echo("-" * len(org.get("orgname")))
|
||||
data = []
|
||||
if org.get("displayname"):
|
||||
data.append(("Display Name:", org.get("displayname")))
|
||||
if org.get("email"):
|
||||
data.append(("Email:", org.get("email")))
|
||||
data.append(
|
||||
(
|
||||
"Owners:",
|
||||
", ".join((owner.get("username") for owner in org.get("owners"))),
|
||||
)
|
||||
)
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
return click.echo()
|
platformio/account/org/commands/remove.py (new file, 34 lines)
@@ -0,0 +1,34 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("remove", short_help="Remove an owner from organization")
|
||||
@click.argument(
|
||||
"orgname",
|
||||
)
|
||||
@click.argument(
|
||||
"username",
|
||||
)
|
||||
def org_remove_cmd(orgname, username):
|
||||
with AccountClient() as client:
|
||||
client.remove_org_owner(orgname, username)
|
||||
return click.secho(
|
||||
"The `%s` owner has been successfully removed from the `%s` organization."
|
||||
% (username, orgname),
|
||||
fg="green",
|
||||
)
|
platformio/account/org/commands/update.py (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
from platformio.account.validate import validate_email, validate_orgname
|
||||
|
||||
|
||||
@click.command("update", short_help="Update organization")
|
||||
@click.argument("cur_orgname")
|
||||
@click.option(
|
||||
"--orgname",
|
||||
callback=lambda _, __, value: validate_orgname(value) if value else value,
|
||||
help="A new orgname",
|
||||
)
|
||||
@click.option(
|
||||
"--email",
|
||||
callback=lambda _, __, value: validate_email(value) if value else value,
|
||||
)
|
||||
@click.option("--displayname")
|
||||
def org_update_cmd(cur_orgname, **kwargs):
|
||||
with AccountClient() as client:
|
||||
org = client.get_org(cur_orgname)
|
||||
new_org = {
|
||||
key: value if value is not None else org[key] for key, value in kwargs.items()
|
||||
}
|
||||
if not any(kwargs.values()):
|
||||
for key in kwargs:
|
||||
new_org[key] = click.prompt(key.capitalize(), default=org[key])
|
||||
if key == "email":
|
||||
validate_email(new_org[key])
|
||||
if key == "orgname":
|
||||
validate_orgname(new_org[key])
|
||||
client.update_org(cur_orgname, new_org)
|
||||
return click.secho(
|
||||
"The organization `%s` has been successfully updated." % cur_orgname,
|
||||
fg="green",
|
||||
)
|
platformio/account/team/cli.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.team.commands.add import team_add_cmd
|
||||
from platformio.account.team.commands.create import team_create_cmd
|
||||
from platformio.account.team.commands.destroy import team_destroy_cmd
|
||||
from platformio.account.team.commands.list import team_list_cmd
|
||||
from platformio.account.team.commands.remove import team_remove_cmd
|
||||
from platformio.account.team.commands.update import team_update_cmd
|
||||
|
||||
|
||||
@click.group(
|
||||
"team",
|
||||
commands=[
|
||||
team_add_cmd,
|
||||
team_create_cmd,
|
||||
team_destroy_cmd,
|
||||
team_list_cmd,
|
||||
team_remove_cmd,
|
||||
team_update_cmd,
|
||||
],
|
||||
short_help="Manage organization teams",
|
||||
)
|
||||
def cli():
|
||||
pass
|
platformio/account/team/commands/add.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
from platformio.account.validate import validate_orgname_teamname
|
||||
|
||||
|
||||
@click.command("add", short_help="Add a new member to team")
|
||||
@click.argument(
|
||||
"orgname_teamname",
|
||||
metavar="ORGNAME:TEAMNAME",
|
||||
callback=lambda _, __, value: validate_orgname_teamname(value),
|
||||
)
|
||||
@click.argument(
|
||||
"username",
|
||||
)
|
||||
def team_add_cmd(orgname_teamname, username):
|
||||
orgname, teamname = orgname_teamname.split(":", 1)
|
||||
with AccountClient() as client:
|
||||
client.add_team_member(orgname, teamname, username)
|
||||
return click.secho(
|
||||
"The new member %s has been successfully added to the %s team."
|
||||
% (username, teamname),
|
||||
fg="green",
|
||||
)
|
platformio/account/team/commands/create.py (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
from platformio.account.validate import validate_orgname_teamname
|
||||
|
||||
|
||||
@click.command("create", short_help="Create a new team")
|
||||
@click.argument(
|
||||
"orgname_teamname",
|
||||
metavar="ORGNAME:TEAMNAME",
|
||||
callback=lambda _, __, value: validate_orgname_teamname(value),
|
||||
)
|
||||
@click.option(
|
||||
"--description",
|
||||
)
|
||||
def team_create_cmd(orgname_teamname, description):
|
||||
orgname, teamname = orgname_teamname.split(":", 1)
|
||||
with AccountClient() as client:
|
||||
client.create_team(orgname, teamname, description)
|
||||
return click.secho(
|
||||
"The team %s has been successfully created." % teamname,
|
||||
fg="green",
|
||||
)
|
platformio/account/team/commands/destroy.py (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
from platformio.account.validate import validate_orgname_teamname
|
||||
|
||||
|
||||
@click.command("destroy", short_help="Destroy a team")
|
||||
@click.argument(
|
||||
"orgname_teamname",
|
||||
metavar="ORGNAME:TEAMNAME",
|
||||
callback=lambda _, __, value: validate_orgname_teamname(value),
|
||||
)
|
||||
def team_destroy_cmd(orgname_teamname):
|
||||
orgname, teamname = orgname_teamname.split(":", 1)
|
||||
click.confirm(
|
||||
click.style(
|
||||
"Are you sure you want to destroy the %s team?" % teamname, fg="yellow"
|
||||
),
|
||||
abort=True,
|
||||
)
|
||||
with AccountClient() as client:
|
||||
client.destroy_team(orgname, teamname)
|
||||
return click.secho(
|
||||
"The team %s has been successfully destroyed." % teamname,
|
||||
fg="green",
|
||||
)
|
platformio/account/team/commands/list.py (new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("list", short_help="List teams")
|
||||
@click.argument("orgname", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def team_list_cmd(orgname, json_output):
|
||||
with AccountClient() as client:
|
||||
data = {}
|
||||
if not orgname:
|
||||
for item in client.list_orgs():
|
||||
teams = client.list_teams(item.get("orgname"))
|
||||
data[item.get("orgname")] = teams
|
||||
else:
|
||||
teams = client.list_teams(orgname)
|
||||
data[orgname] = teams
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(data[orgname] if orgname else data))
|
||||
|
||||
if not any(data.values()):
|
||||
return click.secho("You do not have any teams.", fg="yellow")
|
||||
|
||||
for org_name, teams in data.items():
|
||||
for team in teams:
|
||||
click.echo()
|
||||
click.secho("%s:%s" % (org_name, team.get("name")), fg="cyan")
|
||||
click.echo("-" * len("%s:%s" % (org_name, team.get("name"))))
|
||||
table_data = []
|
||||
if team.get("description"):
|
||||
table_data.append(("Description:", team.get("description")))
|
||||
table_data.append(
|
||||
(
|
||||
"Members:",
|
||||
(
|
||||
", ".join(
|
||||
(member.get("username") for member in team.get("members"))
|
||||
)
|
||||
if team.get("members")
|
||||
else "-"
|
||||
),
|
||||
)
|
||||
)
|
||||
click.echo(tabulate(table_data, tablefmt="plain"))
|
||||
return click.echo()
|
platformio/account/team/commands/remove.py (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
from platformio.account.validate import validate_orgname_teamname
|
||||
|
||||
|
||||
@click.command("remove", short_help="Remove a member from team")
|
||||
@click.argument(
|
||||
"orgname_teamname",
|
||||
metavar="ORGNAME:TEAMNAME",
|
||||
callback=lambda _, __, value: validate_orgname_teamname(value),
|
||||
)
|
||||
@click.argument("username")
|
||||
def team_remove_cmd(orgname_teamname, username):
|
||||
orgname, teamname = orgname_teamname.split(":", 1)
|
||||
with AccountClient() as client:
|
||||
client.remove_team_member(orgname, teamname, username)
|
||||
return click.secho(
|
||||
"The %s member has been successfully removed from the %s team."
|
||||
% (username, teamname),
|
||||
fg="green",
|
||||
)
|
platformio/account/team/commands/update.py (new file, 51 lines)
@@ -0,0 +1,51 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname, validate_teamname


@click.command("update", short_help="Update team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.option(
    "--name",
    callback=lambda _, __, value: validate_teamname(value) if value else value,
    help="A new team name",
)
@click.option(
    "--description",
)
def team_update_cmd(orgname_teamname, **kwargs):
    orgname, teamname = orgname_teamname.split(":", 1)
    with AccountClient() as client:
        team = client.get_team(orgname, teamname)
        new_team = {
            key: value if value is not None else team[key] for key, value in kwargs.items()
        }
        if not any(kwargs.values()):
            for key in kwargs:
                new_team[key] = click.prompt(key.capitalize(), default=team[key])
                if key == "name":
                    validate_teamname(new_team[key])
        client.update_team(orgname, teamname, new_team)
    return click.secho(
        "The team %s has been successfully updated." % teamname,
        fg="green",
    )
platformio/account/validate.py (new file, 84 lines)
@@ -0,0 +1,84 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re

import click


def validate_username(value, field="username"):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I
    ):
        raise click.BadParameter(
            "Invalid %s format. "
            "%s must contain only alphanumeric characters "
            "or single hyphens, cannot begin or end with a hyphen, "
            "and must not be longer than 38 characters."
            % (field.lower(), field.capitalize())
        )
    return value


def validate_orgname(value):
    return validate_username(value, "Organization name")


def validate_email(value):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$", value, flags=re.I
    ):
        raise click.BadParameter("Invalid email address")
    return value


def validate_password(value):
    value = str(value).strip() if value else None
    if not value or not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
        raise click.BadParameter(
            "Invalid password format. "
            "Password must contain at least 8 characters"
            " including a number and a lowercase letter"
        )
    return value


def validate_teamname(value):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I
    ):
        raise click.BadParameter(
            "Invalid team name format. "
            "Team name must only contain alphanumeric characters, "
            "single hyphens, underscores, spaces. It can not "
            "begin or end with a hyphen or a underscore and must"
            " not be longer than 20 characters."
        )
    return value


def validate_orgname_teamname(value):
    value = str(value).strip() if value else None
    if not value or ":" not in value:
        raise click.BadParameter(
            "Please specify organization and team name using the following"
            " format - orgname:teamname. For example, mycompany:DreamTeam"
        )
    orgname, teamname = value.split(":", 1)
    validate_orgname(orgname)
    validate_teamname(teamname)
    return value
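The validators above either return the cleaned (stripped) value or raise click.BadParameter, so they work both as click callbacks and as plain helpers. A minimal sketch, using example values only:

import click

from platformio.account.validate import validate_orgname_teamname, validate_teamname

print(validate_teamname("Dream Team"))                   # returns the stripped value on success
print(validate_orgname_teamname("mycompany:DreamTeam"))  # validates both halves, returns the pair

try:
    validate_teamname("-bad-name-")                      # leading/trailing hyphen is rejected
except click.BadParameter as exc:
    print(exc.format_message())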
@ -12,55 +12,42 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import getpass
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import time
|
||||
import uuid
|
||||
from os.path import dirname, isdir, isfile, join, realpath
|
||||
|
||||
from platformio import __version__, exception, fs, proc
|
||||
from platformio.compat import WINDOWS, dump_json_to_unicode, hashlib_encode_data
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
from platformio.package.lockfile import LockFile
|
||||
from platformio.project.helpers import get_default_projects_dir, get_project_core_dir
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import get_default_projects_dir
|
||||
|
||||
|
||||
def projects_dir_validate(projects_dir):
|
||||
assert isdir(projects_dir)
|
||||
return realpath(projects_dir)
|
||||
assert os.path.isdir(projects_dir)
|
||||
return os.path.abspath(projects_dir)
|
||||
|
||||
|
||||
DEFAULT_SETTINGS = {
|
||||
"auto_update_libraries": {
|
||||
"description": "Automatically update libraries (Yes/No)",
|
||||
"value": False,
|
||||
},
|
||||
"auto_update_platforms": {
|
||||
"description": "Automatically update platforms (Yes/No)",
|
||||
"value": False,
|
||||
},
|
||||
"check_libraries_interval": {
|
||||
"description": "Check for the library updates interval (days)",
|
||||
"value": 7,
|
||||
},
|
||||
"check_platformio_interval": {
|
||||
"description": "Check for the new PlatformIO interval (days)",
|
||||
"value": 3,
|
||||
},
|
||||
"check_platforms_interval": {
|
||||
"description": "Check for the platform updates interval (days)",
|
||||
"description": "Check for the new PlatformIO Core interval (days)",
|
||||
"value": 7,
|
||||
},
|
||||
"check_prune_system_threshold": {
|
||||
"description": "Check for pruning unnecessary data threshold (megabytes)",
|
||||
"value": 1024,
|
||||
},
|
||||
"enable_cache": {
|
||||
"description": "Enable caching for HTTP API requests",
|
||||
"value": True,
|
||||
},
|
||||
"enable_telemetry": {
|
||||
"description": ("Telemetry service <http://bit.ly/pio-telemetry> (Yes/No)"),
|
||||
"description": ("Telemetry service <https://bit.ly/pio-telemetry> (Yes/No)"),
|
||||
"value": True,
|
||||
},
|
||||
"force_verbose": {
|
||||
@ -72,22 +59,33 @@ DEFAULT_SETTINGS = {
|
||||
"value": get_default_projects_dir(),
|
||||
"validator": projects_dir_validate,
|
||||
},
|
||||
"enable_proxy_strict_ssl": {
|
||||
"description": "Verify the proxy server certificate against the list of supplied CAs",
|
||||
"value": True,
|
||||
},
|
||||
}
|
||||
|
||||
SESSION_VARS = {
|
||||
"command_ctx": None,
|
||||
"force_option": False,
|
||||
"caller_id": None,
|
||||
"custom_project_conf": None,
|
||||
"pause_telemetry": False,
|
||||
}
|
||||
|
||||
|
||||
class State(object):
|
||||
def resolve_state_path(conf_option_dir, file_name, ensure_dir_exists=True):
|
||||
state_dir = ProjectConfig.get_instance().get("platformio", conf_option_dir)
|
||||
if ensure_dir_exists and not os.path.isdir(state_dir):
|
||||
os.makedirs(state_dir)
|
||||
return os.path.join(state_dir, file_name)
|
||||
|
||||
|
||||
class State:
|
||||
def __init__(self, path=None, lock=False):
|
||||
self.path = path
|
||||
self.lock = lock
|
||||
if not self.path:
|
||||
self.path = join(get_project_core_dir(), "appstate.json")
|
||||
self.path = resolve_state_path("core_dir", "appstate.json")
|
||||
self._storage = {}
|
||||
self._lockfile = None
|
||||
self.modified = False
|
||||
@ -95,7 +93,7 @@ class State(object):
|
||||
def __enter__(self):
|
||||
try:
|
||||
self._lock_state_file()
|
||||
if isfile(self.path):
|
||||
if os.path.isfile(self.path):
|
||||
self._storage = fs.load_json(self.path)
|
||||
assert isinstance(self._storage, dict)
|
||||
except (
|
||||
@ -110,10 +108,12 @@ class State(object):
|
||||
def __exit__(self, type_, value, traceback):
|
||||
if self.modified:
|
||||
try:
|
||||
with open(self.path, "w") as fp:
|
||||
fp.write(dump_json_to_unicode(self._storage))
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(get_project_core_dir())
|
||||
with open(self.path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(json.dumps(self._storage))
|
||||
except IOError as exc:
|
||||
raise exception.HomeDirPermissionsError(
|
||||
os.path.dirname(self.path)
|
||||
) from exc
|
||||
self._unlock_state_file()
|
||||
|
||||
def _lock_state_file(self):
|
||||
@ -122,8 +122,8 @@ class State(object):
|
||||
self._lockfile = LockFile(self.path)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(dirname(self.path))
|
||||
except IOError as exc:
|
||||
raise exception.HomeDirPermissionsError(os.path.dirname(self.path)) from exc
|
||||
|
||||
def _unlock_state_file(self):
|
||||
if hasattr(self, "_lockfile") and self._lockfile:
|
||||
@ -178,8 +178,8 @@ def sanitize_setting(name, value):
|
||||
value = str(value).lower() in ("true", "yes", "y", "1")
|
||||
elif isinstance(defdata["value"], int):
|
||||
value = int(value)
|
||||
except Exception:
|
||||
raise exception.InvalidSettingValue(value, name)
|
||||
except Exception as exc:
|
||||
raise exception.InvalidSettingValue(value, name) from exc
|
||||
return value
|
||||
|
||||
|
||||
@ -236,43 +236,25 @@ def set_session_var(name, value):
|
||||
|
||||
|
||||
def is_disabled_progressbar():
|
||||
return any(
|
||||
[
|
||||
get_session_var("force_option"),
|
||||
proc.is_ci(),
|
||||
os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true",
|
||||
]
|
||||
)
|
||||
return os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
|
||||
|
||||
|
||||
def get_cid():
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.clients.http import fetch_remote_content
|
||||
|
||||
cid = get_state_item("cid")
|
||||
if cid:
|
||||
return cid
|
||||
uid = None
|
||||
if os.getenv("C9_UID"):
|
||||
uid = os.getenv("C9_UID")
|
||||
elif os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")):
|
||||
try:
|
||||
uid = json.loads(
|
||||
fetch_remote_content(
|
||||
"{api}/user?token={token}".format(
|
||||
api=os.getenv("CHE_API", os.getenv("CHE_API_ENDPOINT")),
|
||||
token=os.getenv("USER_TOKEN"),
|
||||
)
|
||||
)
|
||||
).get("id")
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
if os.getenv("GITHUB_USER"):
|
||||
uid = os.getenv("GITHUB_USER")
|
||||
elif os.getenv("GITPOD_GIT_USER_NAME"):
|
||||
uid = os.getenv("GITPOD_GIT_USER_NAME")
|
||||
if not uid:
|
||||
uid = uuid.getnode()
|
||||
cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
|
||||
cid = str(cid)
|
||||
if WINDOWS or os.getuid() > 0: # pylint: disable=no-member
|
||||
if IS_WINDOWS or os.getuid() > 0: # pylint: disable=no-member
|
||||
set_state_item("cid", cid)
|
||||
set_state_item("created_at", int(time.time()))
|
||||
return cid
|
||||
|
||||
|
||||
@ -288,6 +270,8 @@ def get_user_agent():
|
||||
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
|
||||
data.append("Python/%s" % platform.python_version())
|
||||
data.append("Platform/%s" % platform.platform())
|
||||
if not get_setting("enable_telemetry"):
|
||||
data.append("Telemetry/0")
|
||||
return " ".join(data)
|
||||
|
||||
|
||||
|
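For context on how the app-level helpers touched above are consumed, here is a small sketch (illustrative only; the values are examples):

from platformio import app

# Settings fall back to DEFAULT_SETTINGS when unset; booleans and integers are
# coerced by sanitize_setting() before being stored.
print(app.get_setting("enable_telemetry"))

# State items are persisted to appstate.json under the configured core_dir.
app.set_state_item("last_check", 1234567890)
print(app.get_state_item("last_check"))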
487
platformio/assets/schema/library.json
Normal file
487
platformio/assets/schema/library.json
Normal file
@ -0,0 +1,487 @@
|
||||
{
|
||||
"$id": "https://example.com/library.json",
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "library.json schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"maxLength": 50,
|
||||
"description": "A name of a library.\nMust be unique in the PlatformIO Registry\nShould be slug style for simplicity, consistency, and compatibility. Example: HelloWorld\nCan contain a-z, digits, and dashes (but not start/end with them)\nConsecutive dashes and [:;/,@<>] chars are not allowed.",
|
||||
"required": true
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"maxLength": 20,
|
||||
"description": "A version of a current library source code. Can contain a-z, digits, dots or dash and should be Semantic Versioning compatible.",
|
||||
"required": true
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "The field helps users to identify and search for your library with a brief description. Describe the hardware devices (sensors, boards and etc.) which are suitable with it.",
|
||||
"required": true
|
||||
},
|
||||
"keywords": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Used for search by keyword. Helps to make your library easier to discover without people needing to know its name.\nThe keyword should be lowercased, can contain a-z, digits and dash (but not start/end with them). A list from the keywords can be specified with separator , or declared as Array.",
|
||||
"required": true
|
||||
},
|
||||
"homepage": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "Home page of a library (if is different from repository url).",
|
||||
"required": false
|
||||
},
|
||||
"repository": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"git",
|
||||
"hg",
|
||||
"svn"
|
||||
],
|
||||
"description": "only “git”, “hg” or “svn” are supported"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"branch": {
|
||||
"type": "string",
|
||||
"description": "if is not specified, default branch will be used. This field will be ignored if tag/release exists with the value of version."
|
||||
}
|
||||
},
|
||||
"description": "The repository in which the source code can be found.",
|
||||
"required": false
|
||||
},
|
||||
"authors": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "An author contact information\nIf authors field is not defined, PlatformIO will try to fetch data from VCS provider (Github, Gitlab, etc) if repository is declared.",
|
||||
"required": false
|
||||
},
|
||||
"license": {
|
||||
"type": "string",
|
||||
"description": "A SPDX license ID or SPDX Expression. You can check the full list of SPDX license IDs (see “Identifier” column).",
|
||||
"required": false
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible frameworks. The available framework names are defined in the Frameworks section.\nIf the library is compatible with the all frameworks, then do not declare this field or you use *",
|
||||
"required": false
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible development platforms. The available platform name are defined in Development Platforms section.\nIf the library is compatible with the all platforms, then do not declare this field or use *.\nPlatformIO does not check platforms for compatibility in default mode. See Compatibility Mode for details. If you need a strict checking for compatible platforms for a library, please set libCompatMode to strict.",
|
||||
"required": false
|
||||
},
|
||||
"headers": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "MyLibrary.h"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "FooCore.h, FooFeature.h"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of header files that can be included in a project source files using #include <...> directive.",
|
||||
"required": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"base": {
|
||||
"type": "string"
|
||||
},
|
||||
"files": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "A list of example patterns.",
|
||||
"required": "false"
|
||||
},
|
||||
"dependencies": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of dependent libraries that will be automatically installed.",
|
||||
"required": false
|
||||
},
|
||||
"export": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Export only files that matched declared patterns.\n* - matches everything\n? - matches any single character\n[seq] - matches any character in seq\n[!seq] - matches any character not in seq"
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Exclude the directories and files which match with exclude patterns."
|
||||
}
|
||||
},
|
||||
"description": "This option is useful if you need to exclude extra data (test code, docs, images, PDFs, etc). It allows one to reduce the size of the final archive.\nTo check which files will be included in the final packages, please use pio pkg pack command.",
|
||||
"required": false
|
||||
},
|
||||
"scripts": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"postinstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script AFTER the package has been installed.\nRun a custom Python script located in the package “scripts” folder AFTER the package is installed. Please note that you don’t need to specify a Python interpreter for Python scripts"
|
||||
},
|
||||
"preuninstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script BEFORE the package is removed.\nRun a custom Bash script BEFORE the package is uninstalled. The script is declared as a list of command arguments and is located at the root of a package"
|
||||
}
|
||||
},
|
||||
"description": "Execute custom scripts during the special Package Management CLI life cycle events",
|
||||
"required": false
|
||||
},
|
||||
"build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"flags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Extra flags to control preprocessing, compilation, assembly, and linking processes. More details build_flags.\nKeep in mind when operating with the -I flag (directories to be searched for header files). The path should be relative to the root directory where the library.json manifest is located."
|
||||
},
|
||||
"unflags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Remove base/initial flags which were set by development platform. More details build_unflags."
|
||||
},
|
||||
"includeDir": {
|
||||
"type": "string",
|
||||
"description": "Custom directory to be searched for header files. A default value is include and means that folder is located at the root of a library.\nThe Library Dependency Finder (LDF) will pick a library automatically only when a project or other dependent libraries include any header file located in includeDir or srcDir.",
|
||||
"required": false
|
||||
},
|
||||
"srcDir": {
|
||||
"type": "string",
|
||||
"description": "Custom location of library source code. A default value is src and means that folder is located in the root of a library.",
|
||||
"required": "false"
|
||||
},
|
||||
"srcFilter": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Specify which source files should be included/excluded from build process. The path in filter should be relative to the srcDir option of a library.\nSee syntax for build_src_filter.\nPlease note that you can generate source filter “on-the-fly” using extraScript",
|
||||
"required": false
|
||||
},
|
||||
"extraScript": {
|
||||
"type": "string",
|
||||
"description": "Launch extra script before a build process.",
|
||||
"required": "false"
|
||||
},
|
||||
"libArchive": {
|
||||
"type": "boolean",
|
||||
"description": "Create an archive (*.a, static library) from the object files and link it into a firmware (program). This is default behavior of PlatformIO Build System (\"libArchive\": true).\nSetting \"libArchive\": false will instruct PlatformIO Build System to link object files directly (in-line). This could be useful if you need to override weak symbols defined in framework or other libraries.\nYou can disable library archiving globally using lib_archive option in “platformio.ini” (Project Configuration File).",
|
||||
"required": "false"
|
||||
},
|
||||
"libLDFMode": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"off"
|
||||
],
|
||||
"description": "“Manual mode”, does not process source files of a project and dependencies. Builds only the libraries that are specified in manifests (library.json, module.json) or using lib_deps option."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain"
|
||||
],
|
||||
"description": "[DEFAULT] Parses ALL C/C++ source files of the project and follows only by nested includes (#include ..., chain...) from the libraries. It also parses C, CC, CPP files from libraries which have the same name as included header file. Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep"
|
||||
],
|
||||
"description": "Parses ALL C/C++ source files of the project and parses ALL C/C++ source files of the each found dependency (recursively). Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain+"
|
||||
],
|
||||
"description": "The same behavior as for the chain but evaluates C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep+"
|
||||
],
|
||||
"description": "The same behavior as for the deep but evaluates C/C++ Preprocessor conditional syntax."
|
||||
}
|
||||
],
|
||||
"description": "Specify Library Dependency Finder Mode. See Dependency Finder Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"libCompatMode": {
|
||||
"type": "string",
|
||||
"description": "Specify Library Compatibility Mode. See Compatibility Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"builder": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"PlatformIOLibBuilder"
|
||||
],
|
||||
"description": "Default Builder"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"ArduinoLibBuilder"
|
||||
]
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"MbedLibBuilder"
|
||||
]
|
||||
}
|
||||
],
|
||||
"description": "Override default PlatformIOLibBuilder with another builder.",
|
||||
"required": false
|
||||
}
|
||||
},
|
||||
"required": false
|
||||
}
|
||||
}
|
||||
}
|
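Of the fields above, only name, version, description and keywords are marked as required, so a hypothetical minimal manifest (all values are examples) could look like this:

import json

manifest = {
    "name": "HelloWorld",
    "version": "1.0.0",
    "description": "Example library used only to illustrate the schema",
    "keywords": ["example", "demo"],
}
print(json.dumps(manifest, indent=2))  # contents of a minimal library.json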
@ -16,7 +16,7 @@
|
||||
#
|
||||
# INSTALLATION
|
||||
#
|
||||
# Please visit > https://docs.platformio.org/en/latest/faq.html#platformio-udev-rules
|
||||
# Please visit > https://docs.platformio.org/en/latest/core/installation/udev-rules.html
|
||||
#
|
||||
#####################################################################################
|
||||
|
||||
@ -25,7 +25,8 @@
|
||||
#
|
||||
|
||||
# CP210X USB UART
|
||||
ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea[67][013]", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="80a9", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# FT231XS USB UART
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6015", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
@ -35,9 +36,13 @@ ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666", ENV{ID_MM_DEVIC
|
||||
|
||||
# QinHeng Electronics HL-340 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH343 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d3", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH9102 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d4", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Arduino boards
|
||||
ATTRS{idVendor}=="2341", ATTRS{idProduct}=="[08][02]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="2341", ATTRS{idProduct}=="[08][023]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="2a03", ATTRS{idProduct}=="[08][02]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Arduino SAM-BA
|
||||
@ -61,15 +66,30 @@ ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789A]?", ENV{MTP_NO_PROBE}="1"
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789ABCD]?", MODE:="0666"
|
||||
KERNEL=="ttyACM*", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", MODE:="0666"
|
||||
|
||||
#TI Stellaris Launchpad
|
||||
# TI Stellaris Launchpad
|
||||
ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#TI MSP430 Launchpad
|
||||
# TI MSP430 Launchpad
|
||||
ATTRS{idVendor}=="0451", ATTRS{idProduct}=="f432", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#GD32V DFU Bootloader
|
||||
# GD32V DFU Bootloader
|
||||
ATTRS{idVendor}=="28e9", ATTRS{idProduct}=="0189", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# FireBeetle-ESP32
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7522", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Wio Terminal
|
||||
ATTRS{idVendor}=="2886", ATTRS{idProduct}=="[08]02d", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Raspberry Pi Pico
|
||||
ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# AIR32F103
|
||||
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STM32 virtual COM port
|
||||
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
#
|
||||
@ -81,44 +101,29 @@ SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic UART Port", MODE="0666", ENV{ID
|
||||
# opendous and estick
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="204f", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT232/FT245 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT2232 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6010", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT4232 VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6011", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT232H VID:PID
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6014", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# Original FT232/FT245/FT2232/FT232H/FT4232
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="60[01][104]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# DISTORTEC JTAG-lock-pick Tiny 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8220", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TUMPA, TUMPA Lite
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a98", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a99", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a9[89]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# XDS100v2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="a6d0", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Xverve Signalyzer Tool (DT-USB-ST), Signalyzer LITE (DT-USB-SLITE)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca0", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca1", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca[01]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI/Luminary Stellaris Evaluation Board FTDI (several)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd9", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI/Luminary Stellaris In-Circuit Debug Interface FTDI (ICDI) Board
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcda", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd[9a]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# egnite Turtelizer 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bdc8", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Section5 ICEbear
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c140", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c141", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c14[01]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Amontec JTAGkey and JTAGkey-tiny
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="cff8", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
@ -167,3 +172,9 @@ ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="0666", ENV{ID_MM_DEVICE
|
||||
|
||||
# CMSIS-DAP compatible adapters
|
||||
ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Atmel AVR Dragon
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Espressif USB JTAG/serial debug unit
|
||||
ATTRS{idVendor}=="303a", ATTRS{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
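Several rules above collapse multiple product IDs into one [seq]-style pattern (for example the FTDI 6001/6010/6011/6014 line). Python's fnmatch uses a bracket glob close enough to udev's shell-style matching to sanity-check the consolidation; a rough sketch:

import fnmatch

# The consolidated FTDI product-ID pattern from the rules above
pattern = "60[01][104]"
for pid in ("6001", "6010", "6011", "6014"):
    assert fnmatch.fnmatch(pid, pattern), pid  # every original ID still matches
print("all four original product IDs still match")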
@ -12,10 +12,10 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from os import environ, makedirs
|
||||
from os.path import isdir, join
|
||||
from time import time
|
||||
import time
|
||||
|
||||
import click
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
@ -28,67 +28,69 @@ from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Import # pylint: disable=import-error
|
||||
from SCons.Script import Variables # pylint: disable=import-error
|
||||
|
||||
from platformio import compat, fs
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio import app, fs
|
||||
from platformio.platform.base import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_dir
|
||||
from platformio.project.helpers import get_build_type, get_project_dir
|
||||
|
||||
AllowSubstExceptions(NameError)
|
||||
|
||||
# append CLI arguments to build environment
|
||||
clivars = Variables(None)
|
||||
clivars.AddVariables(
|
||||
("PLATFORM_MANIFEST",),
|
||||
("BUILD_SCRIPT",),
|
||||
("PROJECT_CONFIG",),
|
||||
("PIOENV",),
|
||||
("PIOTEST_RUNNING_NAME",),
|
||||
("UPLOAD_PORT",),
|
||||
("PROGRAM_ARGS",),
|
||||
)
|
||||
|
||||
DEFAULT_ENV_OPTIONS = dict(
|
||||
tools=[
|
||||
"ar",
|
||||
"as",
|
||||
"cc",
|
||||
"c++",
|
||||
"link",
|
||||
"platformio",
|
||||
"piotarget",
|
||||
"pioplatform",
|
||||
"piohooks",
|
||||
"pioasm",
|
||||
"piobuild",
|
||||
"pioproject",
|
||||
"piomaxlen",
|
||||
"pioplatform",
|
||||
"piotest",
|
||||
"piotarget",
|
||||
"piolib",
|
||||
"pioupload",
|
||||
"piomemusage",
|
||||
"pioino",
|
||||
"piomisc",
|
||||
"pioide",
|
||||
"piosize",
|
||||
"piointegration",
|
||||
"piomaxlen",
|
||||
],
|
||||
toolpath=[join(fs.get_source_dir(), "builder", "tools")],
|
||||
toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")],
|
||||
variables=clivars,
|
||||
# Propagating External Environment
|
||||
ENV=environ,
|
||||
UNIX_TIME=int(time()),
|
||||
BUILD_DIR=join("$PROJECT_BUILD_DIR", "$PIOENV"),
|
||||
BUILD_SRC_DIR=join("$BUILD_DIR", "src"),
|
||||
BUILD_TEST_DIR=join("$BUILD_DIR", "test"),
|
||||
COMPILATIONDB_PATH=join("$BUILD_DIR", "compile_commands.json"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
PROGNAME="program",
|
||||
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
ENV=os.environ,
|
||||
UNIX_TIME=int(time.time()),
|
||||
PYTHONEXE=get_pythonexe_path(),
|
||||
IDE_EXTRA_DATA={},
|
||||
)
|
||||
|
||||
# Declare command verbose messages
|
||||
command_strings = dict(
|
||||
ARCOM="Archiving",
|
||||
LINKCOM="Linking",
|
||||
RANLIBCOM="Indexing",
|
||||
ASCOM="Compiling",
|
||||
ASPPCOM="Compiling",
|
||||
CCCOM="Compiling",
|
||||
CXXCOM="Compiling",
|
||||
)
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
DEFAULT_ENV_OPTIONS["ARCOMSTR"] = "Archiving $TARGET"
|
||||
DEFAULT_ENV_OPTIONS["LINKCOMSTR"] = "Linking $TARGET"
|
||||
DEFAULT_ENV_OPTIONS["RANLIBCOMSTR"] = "Indexing $TARGET"
|
||||
for k in ("ASCOMSTR", "ASPPCOMSTR", "CCCOMSTR", "CXXCOMSTR"):
|
||||
DEFAULT_ENV_OPTIONS[k] = "Compiling $TARGET"
|
||||
for name, value in command_strings.items():
|
||||
DEFAULT_ENV_OPTIONS["%sSTR" % name] = "%s $TARGET" % (value)
|
||||
|
||||
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
|
||||
env.SConscriptChdir(False)
|
||||
|
||||
# Load variables from CLI
|
||||
env.Replace(
|
||||
@ -101,71 +103,72 @@ env.Replace(
|
||||
|
||||
# Setup project optional directories
|
||||
config = env.GetProjectConfig()
|
||||
app.set_session_var("custom_project_conf", config.path)
|
||||
|
||||
env.Replace(
|
||||
PROJECT_DIR=get_project_dir(),
|
||||
PROJECT_CORE_DIR=config.get_optional_dir("core"),
|
||||
PROJECT_PACKAGES_DIR=config.get_optional_dir("packages"),
|
||||
PROJECT_WORKSPACE_DIR=config.get_optional_dir("workspace"),
|
||||
PROJECT_LIBDEPS_DIR=config.get_optional_dir("libdeps"),
|
||||
PROJECT_INCLUDE_DIR=config.get_optional_dir("include"),
|
||||
PROJECT_SRC_DIR=config.get_optional_dir("src"),
|
||||
PROJECTSRC_DIR=config.get_optional_dir("src"), # legacy for dev/platform
|
||||
PROJECT_TEST_DIR=config.get_optional_dir("test"),
|
||||
PROJECT_DATA_DIR=config.get_optional_dir("data"),
|
||||
PROJECTDATA_DIR=config.get_optional_dir("data"), # legacy for dev/platform
|
||||
PROJECT_BUILD_DIR=config.get_optional_dir("build"),
|
||||
BUILD_CACHE_DIR=config.get_optional_dir("build_cache"),
|
||||
PROJECT_CORE_DIR=config.get("platformio", "core_dir"),
|
||||
PROJECT_PACKAGES_DIR=config.get("platformio", "packages_dir"),
|
||||
PROJECT_WORKSPACE_DIR=config.get("platformio", "workspace_dir"),
|
||||
PROJECT_LIBDEPS_DIR=config.get("platformio", "libdeps_dir"),
|
||||
PROJECT_INCLUDE_DIR=config.get("platformio", "include_dir"),
|
||||
PROJECT_SRC_DIR=config.get("platformio", "src_dir"),
|
||||
PROJECTSRC_DIR="$PROJECT_SRC_DIR", # legacy for dev/platform
|
||||
PROJECT_TEST_DIR=config.get("platformio", "test_dir"),
|
||||
PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
|
||||
PROJECTDATA_DIR="$PROJECT_DATA_DIR", # legacy for dev/platform
|
||||
PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
|
||||
BUILD_TYPE=get_build_type(config, env["PIOENV"], COMMAND_LINE_TARGETS),
|
||||
BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV", "$BUILD_TYPE"),
|
||||
BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
|
||||
BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
|
||||
BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
LIBSOURCE_DIRS=[
|
||||
config.get_optional_dir("lib"),
|
||||
join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
|
||||
config.get_optional_dir("globallib"),
|
||||
config.get("platformio", "lib_dir"),
|
||||
os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
|
||||
config.get("platformio", "globallib_dir"),
|
||||
],
|
||||
COMPILATIONDB_PATH=os.path.join("$PROJECT_DIR", "compile_commands.json"),
|
||||
PROGNAME="program",
|
||||
PROGPATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PROG_PATH="$PROGPATH", # deprecated
|
||||
)
|
||||
|
||||
if (
|
||||
compat.WINDOWS
|
||||
and sys.version_info >= (3, 8)
|
||||
and env["PROJECT_DIR"].startswith("\\\\")
|
||||
):
|
||||
click.secho(
|
||||
"There is a known issue with Python 3.8+ and mapped network drives on "
|
||||
"Windows.\nPlease downgrade Python to the latest 3.7. More details at:\n"
|
||||
"https://github.com/platformio/platformio-core/issues/3417",
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not isdir(env.subst("$BUILD_CACHE_DIR")):
|
||||
makedirs(env.subst("$BUILD_CACHE_DIR"))
|
||||
env.CacheDir("$BUILD_CACHE_DIR")
|
||||
|
||||
if int(ARGUMENTS.get("ISATTY", 0)):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
|
||||
if env.GetOption("clean"):
|
||||
env.PioClean(env.subst("$BUILD_DIR"))
|
||||
env.Exit(0)
|
||||
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_CACHE_DIR"))
|
||||
env.CacheDir("$BUILD_CACHE_DIR")
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
# Dynamically load dependent tools
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Tool("compilation_db")
|
||||
|
||||
if not isdir(env.subst("$BUILD_DIR")):
|
||||
makedirs(env.subst("$BUILD_DIR"))
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
env.SConscriptChdir(0)
|
||||
env.SConsignFile(
|
||||
join("$BUILD_DIR", ".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]))
|
||||
os.path.join(
|
||||
"$BUILD_CACHE_DIR" if env.subst("$BUILD_CACHE_DIR") else "$BUILD_DIR",
|
||||
".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]),
|
||||
)
|
||||
)
|
||||
|
||||
for item in env.GetExtraScripts("pre"):
|
||||
env.SConscript(item, exports="env")
|
||||
env.SConscript(env.GetExtraScripts("pre"), exports="env")
|
||||
|
||||
if env.IsCleanTarget():
|
||||
env.CleanProject(fullclean=int(ARGUMENTS.get("FULLCLEAN", 0)))
|
||||
env.Exit(0)
|
||||
|
||||
env.SConscript("$BUILD_SCRIPT")
|
||||
|
||||
@ -174,16 +177,15 @@ if "UPLOAD_FLAGS" in env:
|
||||
if env.GetProjectOption("upload_command"):
|
||||
env.Replace(UPLOADCMD=env.GetProjectOption("upload_command"))
|
||||
|
||||
for item in env.GetExtraScripts("post"):
|
||||
env.SConscript(item, exports="env")
|
||||
env.SConscript(env.GetExtraScripts("post"), exports="env")
|
||||
|
||||
##############################################################################
|
||||
|
||||
# Checking program size
|
||||
if env.get("SIZETOOL") and not (
|
||||
set(["nobuild", "sizedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
set(["nobuild", "__memusage"]) & set(COMMAND_LINE_TARGETS)
|
||||
):
|
||||
env.Depends(["upload", "program"], "checkprogsize")
|
||||
env.Depends("upload", "checkprogsize")
|
||||
# Replace platform's "size" target with our
|
||||
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
|
||||
Default(None)
|
||||
@ -195,42 +197,57 @@ if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
|
||||
# Print configured protocols
|
||||
env.AddPreAction(
|
||||
["upload", "program"],
|
||||
"upload",
|
||||
env.VerboseAction(
|
||||
lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol...",
|
||||
),
|
||||
)
|
||||
|
||||
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
|
||||
|
||||
env.ProcessDelayedActions()
|
||||
|
||||
##############################################################################
|
||||
|
||||
if "envdump" in COMMAND_LINE_TARGETS:
|
||||
click.echo(env.Dump())
|
||||
env.Exit(0)
|
||||
|
||||
if "idedata" in COMMAND_LINE_TARGETS:
|
||||
if env.IsIntegrationDump():
|
||||
projenv = None
|
||||
try:
|
||||
Import("projenv")
|
||||
except: # pylint: disable=bare-except
|
||||
projenv = env
|
||||
data = projenv.DumpIntegrationData(env)
|
||||
# dump to file for the further reading by project.helpers.load_build_metadata
|
||||
with open(
|
||||
projenv.subst(os.path.join("$BUILD_DIR", "metadata.json")),
|
||||
mode="w",
|
||||
encoding="utf8",
|
||||
) as fp:
|
||||
json.dump(data, fp)
|
||||
click.echo(
|
||||
"\n%s\n"
|
||||
% dump_json_to_unicode(
|
||||
projenv.DumpIDEData(env) # pylint: disable=undefined-variable
|
||||
)
|
||||
"Metadata has been saved to the following location: %s"
|
||||
% projenv.subst(os.path.join("$BUILD_DIR", "metadata.json"))
|
||||
)
|
||||
env.Exit(0)
|
||||
|
||||
if "sizedata" in COMMAND_LINE_TARGETS:
|
||||
if "__memusage" in COMMAND_LINE_TARGETS:
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"sizedata",
|
||||
"__memusage",
|
||||
DEFAULT_TARGETS,
|
||||
env.VerboseAction(env.DumpSizeData, "Generating memory usage report..."),
|
||||
env.VerboseAction(env.DumpMemoryUsage, "Generating memory usage report..."),
|
||||
)
|
||||
)
|
||||
|
||||
Default("sizedata")
|
||||
Default("__memusage")
|
||||
|
||||
# issue #4604: process targets sequentially
|
||||
for index, target in enumerate(
|
||||
[t for t in COMMAND_LINE_TARGETS if not t.startswith("__")][1:]
|
||||
):
|
||||
env.Depends(target, COMMAND_LINE_TARGETS[index])
|
||||
|
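The new loop just above chains each user-facing CLI target onto the one before it, so SCons processes them in the order given on the command line (issue #4604). A tiny sketch of the pairing it produces, with example target names and no "__"-prefixed internal targets:

targets = ["upload", "monitor"]  # e.g. `pio run -t upload -t monitor`
pairs = [(t, targets[i]) for i, t in enumerate(targets[1:])]
print(pairs)  # [('monitor', 'upload')] -> "monitor" now depends on "upload"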
@ -1,226 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
# Copyright 2020 MongoDB Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included
|
||||
# in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
|
||||
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
# pylint: disable=unused-argument, protected-access, unused-variable, import-error
|
||||
# Original: https://github.com/mongodb/mongo/blob/master/site_scons/site_tools/compilation_db.py
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
|
||||
import SCons
|
||||
|
||||
from platformio.builder.tools.platformio import SRC_ASM_EXT, SRC_C_EXT, SRC_CXX_EXT
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
# Implements the ability for SCons to emit a compilation database for the MongoDB project. See
|
||||
# http://clang.llvm.org/docs/JSONCompilationDatabase.html for details on what a compilation
|
||||
# database is, and why you might want one. The only user visible entry point here is
|
||||
# 'env.CompilationDatabase'. This method takes an optional 'target' to name the file that
|
||||
# should hold the compilation database, otherwise, the file defaults to compile_commands.json,
|
||||
# which is the name that most clang tools search for by default.
|
||||
|
||||
# TODO: Is there a better way to do this than this global? Right now this exists so that the
|
||||
# emitter we add can record all of the things it emits, so that the scanner for the top level
|
||||
# compilation database can access the complete list, and also so that the writer has easy
|
||||
# access to write all of the files. But it seems clunky. How can the emitter and the scanner
|
||||
# communicate more gracefully?
|
||||
__COMPILATION_DB_ENTRIES = []
|
||||
|
||||
|
||||
# We make no effort to avoid rebuilding the entries. Someday, perhaps we could and even
|
||||
# integrate with the cache, but there doesn't seem to be much call for it.
|
||||
class __CompilationDbNode(SCons.Node.Python.Value):
|
||||
def __init__(self, value):
|
||||
SCons.Node.Python.Value.__init__(self, value)
|
||||
self.Decider(changed_since_last_build_node)
|
||||
|
||||
|
||||
def changed_since_last_build_node(*args, **kwargs):
|
||||
""" Dummy decider to force always building"""
|
||||
return True
|
||||
|
||||
|
||||
def makeEmitCompilationDbEntry(comstr):
|
||||
"""
|
||||
Effectively this creates a lambda function to capture:
|
||||
* command line
|
||||
* source
|
||||
* target
|
||||
:param comstr: unevaluated command line
|
||||
:return: an emitter which has captured the above
|
||||
"""
|
||||
user_action = SCons.Action.Action(comstr)
|
||||
|
||||
def EmitCompilationDbEntry(target, source, env):
|
||||
"""
|
||||
This emitter will be added to each c/c++ object build to capture the info needed
|
||||
for clang tools
|
||||
:param target: target node(s)
|
||||
:param source: source node(s)
|
||||
:param env: Environment for use building this node
|
||||
:return: target(s), source(s)
|
||||
"""
|
||||
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]):
|
||||
continue
|
||||
env[cmd] = where_is_program(
|
||||
env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
|
||||
)
|
||||
|
||||
dbtarget = __CompilationDbNode(source)
|
||||
|
||||
entry = env.__COMPILATIONDB_Entry(
|
||||
target=dbtarget,
|
||||
source=[],
|
||||
__COMPILATIONDB_UTARGET=target,
|
||||
__COMPILATIONDB_USOURCE=source,
|
||||
__COMPILATIONDB_UACTION=user_action,
|
||||
__COMPILATIONDB_ENV=env,
|
||||
)
|
||||
|
||||
# TODO: Technically, these next two lines should not be required: it should be fine to
|
||||
# cache the entries. However, they don't seem to update properly. Since they are quick
|
||||
# to re-generate disable caching and sidestep this problem.
|
||||
env.AlwaysBuild(entry)
|
||||
env.NoCache(entry)
|
||||
|
||||
__COMPILATION_DB_ENTRIES.append(dbtarget)
|
||||
|
||||
return target, source
|
||||
|
||||
return EmitCompilationDbEntry
|
||||
|
||||
|
||||
def CompilationDbEntryAction(target, source, env, **kw):
|
||||
"""
|
||||
Create a dictionary with evaluated command line, target, source
|
||||
and store that info as an attribute on the target
|
||||
(Which has been stored in __COMPILATION_DB_ENTRIES array
|
||||
:param target: target node(s)
|
||||
:param source: source node(s)
|
||||
:param env: Environment for use building this node
|
||||
:param kw:
|
||||
:return: None
|
||||
"""
|
||||
|
||||
command = env["__COMPILATIONDB_UACTION"].strfunction(
|
||||
target=env["__COMPILATIONDB_UTARGET"],
|
||||
source=env["__COMPILATIONDB_USOURCE"],
|
||||
env=env["__COMPILATIONDB_ENV"],
|
||||
)
|
||||
|
||||
entry = {
|
||||
"directory": env.Dir("#").abspath,
|
||||
"command": command,
|
||||
"file": str(env["__COMPILATIONDB_USOURCE"][0]),
|
||||
}
|
||||
|
||||
target[0].write(entry)
|
||||
|
||||
|
||||
def WriteCompilationDb(target, source, env):
|
||||
entries = []
|
||||
|
||||
for s in __COMPILATION_DB_ENTRIES:
|
||||
item = s.read()
|
||||
item["file"] = os.path.abspath(item["file"])
|
||||
entries.append(item)
|
||||
|
||||
with open(str(target[0]), "w") as target_file:
|
||||
json.dump(
|
||||
entries, target_file, sort_keys=True, indent=4, separators=(",", ": ")
|
||||
)
|
||||
|
||||
|
||||
def ScanCompilationDb(node, env, path):
|
||||
return __COMPILATION_DB_ENTRIES
|
||||
|
||||
|
||||
def generate(env, **kwargs):
|
||||
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
|
||||
|
||||
env["COMPILATIONDB_COMSTR"] = kwargs.get(
|
||||
"COMPILATIONDB_COMSTR", "Building compilation database $TARGET"
|
||||
)
|
||||
|
||||
components_by_suffix = itertools.chain(
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_C_EXT],
|
||||
[
|
||||
(static_obj, SCons.Defaults.StaticObjectEmitter, "$CCCOM"),
|
||||
(shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCCCOM"),
|
||||
],
|
||||
),
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_CXX_EXT],
|
||||
[
|
||||
(static_obj, SCons.Defaults.StaticObjectEmitter, "$CXXCOM"),
|
||||
(shared_obj, SCons.Defaults.SharedObjectEmitter, "$SHCXXCOM"),
|
||||
],
|
||||
),
|
||||
itertools.product(
|
||||
[".%s" % ext for ext in SRC_ASM_EXT],
|
||||
[(static_obj, SCons.Defaults.StaticObjectEmitter, "$ASCOM")],
|
||||
),
|
||||
)
|
||||
|
||||
for entry in components_by_suffix:
|
||||
suffix = entry[0]
|
||||
builder, base_emitter, command = entry[1]
|
||||
|
||||
# Assumes a dictionary emitter
|
||||
emitter = builder.emitter[suffix]
|
||||
builder.emitter[suffix] = SCons.Builder.ListEmitter(
|
||||
[emitter, makeEmitCompilationDbEntry(command)]
|
||||
)
|
||||
|
||||
env["BUILDERS"]["__COMPILATIONDB_Entry"] = SCons.Builder.Builder(
|
||||
action=SCons.Action.Action(CompilationDbEntryAction, None),
|
||||
)
|
||||
|
||||
env["BUILDERS"]["__COMPILATIONDB_Database"] = SCons.Builder.Builder(
|
||||
action=SCons.Action.Action(WriteCompilationDb, "$COMPILATIONDB_COMSTR"),
|
||||
target_scanner=SCons.Scanner.Scanner(
|
||||
function=ScanCompilationDb, node_class=None
|
||||
),
|
||||
)
|
||||
|
||||
def CompilationDatabase(env, target):
|
||||
result = env.__COMPILATIONDB_Database(target=target, source=[])
|
||||
|
||||
env.AlwaysBuild(result)
|
||||
env.NoCache(result)
|
||||
|
||||
return result
|
||||
|
||||
env.AddMethod(CompilationDatabase, "CompilationDatabase")
|
||||
|
||||
|
||||
def exists(env):
|
||||
return True
|
platformio/builder/tools/pioasm.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import SCons.Tool.asm  # pylint: disable=import-error

#
# Resolve https://github.com/platformio/platformio-core/issues/3917
# Avoid forcing .S to bare assembly on Windows OS
#

if ".S" in SCons.Tool.asm.ASSuffixes:
    SCons.Tool.asm.ASSuffixes.remove(".S")
if ".S" not in SCons.Tool.asm.ASPPSuffixes:
    SCons.Tool.asm.ASPPSuffixes.append(".S")


generate = SCons.Tool.asm.generate
exists = SCons.Tool.asm.exists
@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import fnmatch
|
||||
import os
|
||||
import sys
|
||||
@ -23,12 +21,12 @@ from SCons.Node import FS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Export # pylint: disable=import-error
|
||||
from SCons.Script import SConscript # pylint: disable=import-error
|
||||
|
||||
from platformio import __version__, fs
|
||||
from platformio.compat import MACOS, string_types
|
||||
from platformio.compat import IS_MACOS, string_types
|
||||
from platformio.package.version import pepver_to_semver
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_ASM_EXT = ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
@ -47,14 +45,16 @@ def scons_patched_match_splitext(path, suffixes=None):
|
||||
|
||||
|
||||
def GetBuildType(env):
|
||||
return (
|
||||
"debug"
|
||||
if (
|
||||
set(["debug", "sizedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
or env.GetProjectOption("build_type") == "debug"
|
||||
)
|
||||
else "release"
|
||||
)
|
||||
modes = []
|
||||
if (
|
||||
set(["__debug", "sizedata"]) # sizedata = for memory inspection
|
||||
& set(COMMAND_LINE_TARGETS)
|
||||
or env.GetProjectOption("build_type") == "debug"
|
||||
):
|
||||
modes.append("debug")
|
||||
if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
|
||||
modes.append("test")
|
||||
return ", ".join(modes or ["release"])
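Since GetBuildType now reports a comma-separated combination of modes (for example "debug, test"), the checks later in this diff switch from equality to membership tests; a minimal sketch of that pattern:

build_type = "debug, test"  # hypothetical value of env["BUILD_TYPE"]

if "debug" in build_type:
    pass  # e.g. env.ConfigureDebugTarget() in ProcessProgramDeps() below
if "test" in build_type:
    pass  # e.g. build the test sources in ProcessProjectDeps() below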
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
@ -69,14 +69,12 @@ def BuildProgram(env):
|
||||
if (
|
||||
env.get("LIBS")
|
||||
and env.GetCompilerType() == "gcc"
|
||||
and (env.PioPlatform().is_embedded() or not MACOS)
|
||||
and (env.PioPlatform().is_embedded() or not IS_MACOS)
|
||||
):
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
program = env.Program(
|
||||
os.path.join("$BUILD_DIR", env.subst("$PROGNAME")), env["PIOBUILDFILES"]
|
||||
)
|
||||
program = env.Program(env.subst("$PROGPATH"), env["PIOBUILDFILES"])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
AlwaysBuild(
|
||||
@ -87,7 +85,7 @@ def BuildProgram(env):
|
||||
)
|
||||
)
|
||||
|
||||
print("Building in %s mode" % env.GetBuildType())
|
||||
print("Building in %s mode" % env["BUILD_TYPE"])
|
||||
|
||||
return program
|
||||
|
||||
@ -112,10 +110,6 @@ def ProcessProgramDeps(env):
|
||||
|
||||
env.PrintConfiguration()
|
||||
|
||||
# fix ASM handling under non case-sensitive OS
|
||||
if not Util.case_sensitive_suffixes(".s", ".S"):
|
||||
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
|
||||
|
||||
# process extra flags from board
|
||||
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
|
||||
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
|
||||
@ -126,56 +120,72 @@ def ProcessProgramDeps(env):
|
||||
# process framework scripts
|
||||
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
|
||||
|
||||
if env.GetBuildType() == "debug":
|
||||
env.ConfigureDebugFlags()
|
||||
if "debug" in env["BUILD_TYPE"]:
|
||||
env.ConfigureDebugTarget()
|
||||
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.ConfigureTestTarget()
|
||||
env.ProcessCompileDbToolchainOption()
|
||||
|
||||
|
||||
def ProcessCompileDbToolchainOption(env):
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]):
|
||||
continue
|
||||
env[cmd] = where_is_program(
|
||||
env.subst("$%s" % cmd), env.subst("${ENV['PATH']}")
|
||||
)
|
||||
|
||||
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
|
||||
print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
|
||||
for scope, includes in env.DumpIntegrationIncludes().items():
|
||||
if scope in ("toolchain",):
|
||||
env.Append(CPPPATH=includes)
|
||||
|
||||
|
||||
def ProcessProjectDeps(env):
|
||||
project_lib_builder = env.ConfigureProjectLibBuilder()
|
||||
plb = env.ConfigureProjectLibBuilder()
|
||||
|
||||
# prepend project libs to the beginning of list
|
||||
env.Prepend(LIBS=project_lib_builder.build())
|
||||
env.Prepend(LIBS=plb.build())
|
||||
# prepend extra linker related options from libs
|
||||
env.PrependUnique(
|
||||
**{
|
||||
key: project_lib_builder.env.get(key)
|
||||
key: plb.env.get(key)
|
||||
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
|
||||
if project_lib_builder.env.get(key)
|
||||
if plb.env.get(key)
|
||||
}
|
||||
)
|
||||
|
||||
projenv = env.Clone()
|
||||
|
||||
# CPPPATH from dependencies
|
||||
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
|
||||
# extra build flags from `platformio.ini`
|
||||
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
|
||||
is_test = "__test" in COMMAND_LINE_TARGETS
|
||||
if is_test:
|
||||
projenv.BuildSources(
|
||||
if "test" in env["BUILD_TYPE"]:
|
||||
build_files_before_nums = len(env.get("PIOBUILDFILES", []))
|
||||
plb.env.BuildSources(
|
||||
"$BUILD_TEST_DIR", "$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
|
||||
)
|
||||
if not is_test or env.GetProjectOption("test_build_project_src"):
|
||||
projenv.BuildSources(
|
||||
if len(env.get("PIOBUILDFILES", [])) - build_files_before_nums < 1:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your test suites "
|
||||
"to the '%s' folder\n" % env.subst("$PROJECT_TEST_DIR")
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
if "test" not in env["BUILD_TYPE"] or env.GetProjectOption("test_build_src"):
|
||||
plb.env.BuildSources(
|
||||
"$BUILD_SRC_DIR", "$PROJECT_SRC_DIR", env.get("SRC_FILTER")
|
||||
)
|
||||
|
||||
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to '%s' folder\n" % env.subst("$PROJECT_SRC_DIR")
|
||||
"to the '%s' folder\n" % env.subst("$PROJECT_SRC_DIR")
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
Export("projenv")
|
||||
|
||||
|
||||
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
if not isinstance(flags, list):
|
||||
@ -205,13 +215,14 @@ def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
p = env.subst(p)
|
||||
if os.path.isdir(p):
|
||||
result[k][i] = os.path.realpath(p)
|
||||
result[k][i] = os.path.abspath(p)
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result["CCFLAGS"][i] = (f[0], env.File(os.path.realpath(f[1].get_path())))
|
||||
result["CCFLAGS"][i] = (f[0], env.subst(f[1].get_path()))
|
||||
|
||||
return result
|
||||
|
||||
@ -240,33 +251,30 @@ def ProcessUnFlags(env, flags):
|
||||
if not flags:
|
||||
return
|
||||
parsed = env.ParseFlagsExtended(flags)
|
||||
|
||||
# get all flags and copy them to each "*FLAGS" variable
|
||||
all_flags = []
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
all_flags.extend(unflags)
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
parsed[key].extend(all_flags)
|
||||
|
||||
for key, unflags in parsed.items():
|
||||
for unflag in unflags:
|
||||
for current in env.get(key, []):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
isinstance(current, (tuple, list)) and unflag[0] == current[0],
|
||||
]
|
||||
if any(conditions):
|
||||
env[key].remove(current)
|
||||
unflag_scopes = tuple(set(["ASPPFLAGS"] + list(parsed.keys())))
|
||||
for scope in unflag_scopes:
|
||||
for unflags in parsed.values():
|
||||
for unflag in unflags:
|
||||
for current in list(env.get(scope, [])):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
not isinstance(unflag, (tuple, list))
|
||||
and isinstance(current, (tuple, list))
|
||||
and unflag == current[0],
|
||||
]
|
||||
if any(conditions):
|
||||
env[scope].remove(current)
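A small worked example (flags are illustrative) of what the rewritten ProcessUnFlags does: every parsed unflag is removed from each affected scope, and ASPPFLAGS is always included in the scope list.

Import("env")  # e.g. inside an extra script

env.Append(CCFLAGS=["-Os", "-Wall"], ASPPFLAGS=["-Os"])
env.ProcessUnFlags("-Os")
# "-Os" is now gone from both CCFLAGS and ASPPFLAGS; "-Wall" is untouched.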
|
||||
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None):
|
||||
def StringifyMacro(env, value): # pylint: disable=unused-argument
|
||||
return '\\"%s\\"' % value.replace('"', '\\\\\\"')
|
||||
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None, src_exts=None):
|
||||
src_filter = env.subst(src_filter) if src_filter else None
|
||||
src_filter = src_filter or SRC_FILTER_DEFAULT
|
||||
return fs.match_src_files(
|
||||
env.subst(src_dir), src_filter, SRC_BUILD_EXT + SRC_HEADER_EXT
|
||||
)
|
||||
src_exts = src_exts or (SRC_BUILD_EXT + SRC_HEADER_EXT)
|
||||
return fs.match_src_files(env.subst(src_dir), src_filter, src_exts)
|
||||
|
||||
|
||||
def CollectBuildFiles(
|
||||
@ -279,7 +287,7 @@ def CollectBuildFiles(
|
||||
if src_dir.endswith(os.sep):
|
||||
src_dir = src_dir[:-1]
|
||||
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter):
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter, SRC_BUILD_EXT):
|
||||
_reldir = os.path.dirname(item)
|
||||
_src_dir = os.path.join(src_dir, _reldir) if _reldir else src_dir
|
||||
_var_dir = os.path.join(variant_dir, _reldir) if _reldir else variant_dir
|
||||
@ -288,8 +296,7 @@ def CollectBuildFiles(
|
||||
variants.append(_var_dir)
|
||||
env.VariantDir(_var_dir, _src_dir, duplicate)
|
||||
|
||||
if fs.path_endswith_ext(item, SRC_BUILD_EXT):
|
||||
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
|
||||
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
|
||||
|
||||
middlewares = env.get("__PIO_BUILD_MIDDLEWARES")
|
||||
if not middlewares:
|
||||
@ -301,7 +308,12 @@ def CollectBuildFiles(
|
||||
for callback, pattern in middlewares:
|
||||
if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
|
||||
continue
|
||||
new_node = callback(new_node)
|
||||
if callback.__code__.co_argcount == 2:
|
||||
new_node = callback(env, new_node)
|
||||
else:
|
||||
new_node = callback(new_node)
|
||||
if not new_node:
|
||||
break
|
||||
if new_node:
|
||||
new_sources.append(new_node)
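Because the dispatcher above now inspects the callback arity, build middlewares may use either the legacy single-node signature or the new (env, node) form; a hedged sketch of an extra script using the two-argument variant (the glob pattern is made up):

Import("env")

def drop_vendor_examples(env, node):
    # Returning None excludes the node from the build;
    # returning a (possibly replaced) node keeps it.
    return None

env.AddBuildMiddleware(drop_vendor_examples, "*/examples/*.c")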
|
||||
|
||||
@ -323,36 +335,36 @@ def BuildFrameworks(env, frameworks):
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
board_frameworks = env.BoardConfig().get("frameworks", [])
|
||||
if frameworks == ["platformio"]:
|
||||
if board_frameworks:
|
||||
frameworks.insert(0, board_frameworks[0])
|
||||
else:
|
||||
sys.stderr.write("Error: Please specify `board` in `platformio.ini`\n")
|
||||
env.Exit(1)
|
||||
|
||||
for f in frameworks:
|
||||
if f == "arduino":
|
||||
# Arduino IDE appends .o the end of filename
|
||||
supported_frameworks = env.BoardConfig().get("frameworks", [])
|
||||
for name in frameworks:
|
||||
if name == "arduino":
|
||||
# Arduino IDE appends .o to the end of filename
|
||||
Builder.match_splitext = scons_patched_match_splitext
|
||||
if "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.ConvertInoToCpp()
|
||||
|
||||
if f in board_frameworks:
|
||||
SConscript(env.GetFrameworkScript(f), exports="env")
|
||||
if name in supported_frameworks:
|
||||
SConscript(env.GetFrameworkScript(name), exports="env")
|
||||
else:
|
||||
sys.stderr.write("Error: This board doesn't support %s framework!\n" % f)
|
||||
sys.stderr.write("Error: This board doesn't support %s framework!\n" % name)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
return env.StaticLibrary(
|
||||
env.subst(variant_dir), env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
)
|
||||
nodes = nodes or env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
return env.StaticLibrary(env.subst(variant_dir), nodes)
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
if env.get("PIOMAINPROG"):
|
||||
sys.stderr.write(
|
||||
"Error: The main program is already constructed and the inline "
|
||||
"source files are not allowed. Please use `env.BuildLibrary(...)` "
|
||||
"or PRE-type script instead."
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
DefaultEnvironment().Append(
|
||||
PIOBUILDFILES=[
|
||||
@ -369,10 +381,12 @@ def generate(env):
|
||||
env.AddMethod(GetBuildType)
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ProcessProgramDeps)
|
||||
env.AddMethod(ProcessCompileDbToolchainOption)
|
||||
env.AddMethod(ProcessProjectDeps)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
env.AddMethod(ProcessUnFlags)
|
||||
env.AddMethod(StringifyMacro)
|
||||
env.AddMethod(MatchSourceFiles)
|
||||
env.AddMethod(CollectBuildFiles)
|
||||
env.AddMethod(AddBuildMiddleware)
|
platformio/builder/tools/piohooks.py (new file, 50 lines)
@@ -0,0 +1,50 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def AddActionWrapper(handler):
    def wraps(env, files, action):
        if not isinstance(files, (list, tuple, set)):
            files = [files]
        known_nodes = []
        unknown_files = []
        for item in files:
            nodes = env.arg2nodes(item, env.fs.Entry)
            if nodes and nodes[0].exists():
                known_nodes.extend(nodes)
            else:
                unknown_files.append(item)
        if unknown_files:
            env.Append(**{"_PIO_DELAYED_ACTIONS": [(handler, unknown_files, action)]})
        if known_nodes:
            return handler(known_nodes, action)
        return []

    return wraps


def ProcessDelayedActions(env):
    for func, nodes, action in env.get("_PIO_DELAYED_ACTIONS", []):
        func(nodes, action)


def generate(env):
    env.Replace(**{"_PIO_DELAYED_ACTIONS": []})
    env.AddMethod(AddActionWrapper(env.AddPreAction), "AddPreAction")
    env.AddMethod(AddActionWrapper(env.AddPostAction), "AddPostAction")
    env.AddMethod(ProcessDelayedActions)


def exists(_):
    return True
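piohooks.py exists so that AddPreAction/AddPostAction can target files that do not exist yet when an extra script is parsed; the wrapper parks them in _PIO_DELAYED_ACTIONS and ProcessDelayedActions replays them later. A minimal usage sketch (paths and action are illustrative):

Import("env")

# $BUILD_DIR/${PROGNAME}.hex is not created until after linking, so the
# action below is deferred instead of being silently dropped.
env.AddPostAction(
    "$BUILD_DIR/${PROGNAME}.hex",
    env.VerboseAction("echo firmware ready", "Post-processing firmware"),
)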
platformio/builder/tools/pioino.py (new file, 256 lines)
@@ -0,0 +1,256 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import atexit
|
||||
import glob
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio.compat import get_filesystem_encoding, get_locale_encoding
|
||||
|
||||
|
||||
class InoToCPPConverter:
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
([a-z_\d\&]+\*?\s+){1,2} # return type
|
||||
([a-z_\d]+\s*) # name of prototype
|
||||
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
|
||||
)\s*(\{|;) # must end with `{` or `;`
|
||||
""",
|
||||
re.X | re.M | re.I,
|
||||
)
|
||||
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
|
||||
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
|
||||
|
||||
def __init__(self, env):
|
||||
self.env = env
|
||||
self._main_ino = None
|
||||
self._safe_encoding = None
|
||||
|
||||
def read_safe_contents(self, path):
|
||||
error_reported = False
|
||||
for encoding in (
|
||||
"utf-8",
|
||||
None,
|
||||
get_filesystem_encoding(),
|
||||
get_locale_encoding(),
|
||||
"latin-1",
|
||||
):
|
||||
try:
|
||||
with io.open(path, encoding=encoding) as fp:
|
||||
contents = fp.read()
|
||||
self._safe_encoding = encoding
|
||||
return contents
|
||||
except UnicodeDecodeError:
|
||||
if not error_reported:
|
||||
error_reported = True
|
||||
click.secho(
|
||||
"Unicode decode error has occurred, please remove invalid "
|
||||
"(non-ASCII or non-UTF8) characters from %s file or convert it to UTF-8"
|
||||
% path,
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
return ""
|
||||
|
||||
def write_safe_contents(self, path, contents):
|
||||
with io.open(
|
||||
path, "w", encoding=self._safe_encoding, errors="backslashreplace"
|
||||
) as fp:
|
||||
return fp.write(contents)
|
||||
|
||||
def is_main_node(self, contents):
|
||||
return self.DETECTMAIN_RE.search(contents)
|
||||
|
||||
def convert(self, nodes):
|
||||
contents = self.merge(nodes)
|
||||
if not contents:
|
||||
return None
|
||||
return self.process(contents)
|
||||
|
||||
def merge(self, nodes):
|
||||
assert nodes
|
||||
lines = []
|
||||
for node in nodes:
|
||||
contents = self.read_safe_contents(node.get_path())
|
||||
_lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
|
||||
if self.is_main_node(contents):
|
||||
lines = _lines + lines
|
||||
self._main_ino = node.get_path()
|
||||
else:
|
||||
lines.extend(_lines)
|
||||
|
||||
if not self._main_ino:
|
||||
self._main_ino = nodes[0].get_path()
|
||||
|
||||
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
|
||||
|
||||
def process(self, contents):
|
||||
out_file = re.sub(r"[\"\'\;]+", "", self._main_ino) + ".cpp"
|
||||
assert self._gcc_preprocess(contents, out_file)
|
||||
contents = self.read_safe_contents(out_file)
|
||||
contents = self._join_multiline_strings(contents)
|
||||
self.write_safe_contents(out_file, self.append_prototypes(contents))
|
||||
return out_file
|
||||
|
||||
def _gcc_preprocess(self, contents, out_file):
|
||||
tmp_path = tempfile.mkstemp()[1]
|
||||
self.write_safe_contents(tmp_path, contents)
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
|
||||
out_file, tmp_path
|
||||
),
|
||||
"Converting " + os.path.basename(out_file[:-4]),
|
||||
)
|
||||
)
|
||||
atexit.register(_delete_file, tmp_path)
|
||||
return os.path.isfile(out_file)
|
||||
|
||||
def _join_multiline_strings(self, contents):
|
||||
if "\\\n" not in contents:
|
||||
return contents
|
||||
newlines = []
|
||||
linenum = 0
|
||||
stropen = False
|
||||
for line in contents.split("\n"):
|
||||
_linenum = self._parse_preproc_line_num(line)
|
||||
if _linenum is not None:
|
||||
linenum = _linenum
|
||||
else:
|
||||
linenum += 1
|
||||
|
||||
if line.endswith("\\"):
|
||||
if line.startswith('"'):
|
||||
stropen = True
|
||||
newlines.append(line[:-1])
|
||||
continue
|
||||
if stropen:
|
||||
newlines[len(newlines) - 1] += line[:-1]
|
||||
continue
|
||||
elif stropen and line.endswith(('",', '";')):
|
||||
newlines[len(newlines) - 1] += line
|
||||
stropen = False
|
||||
newlines.append(
|
||||
'#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/"))
|
||||
)
|
||||
continue
|
||||
|
||||
newlines.append(line)
|
||||
|
||||
return "\n".join(newlines)
|
||||
|
||||
@staticmethod
|
||||
def _parse_preproc_line_num(line):
|
||||
if not line.startswith("#"):
|
||||
return None
|
||||
tokens = line.split(" ", 3)
|
||||
if len(tokens) > 2 and tokens[1].isdigit():
|
||||
return int(tokens[1])
|
||||
return None
|
||||
|
||||
def _parse_prototypes(self, contents):
|
||||
prototypes = []
|
||||
reserved_keywords = set(["if", "else", "while"])
|
||||
for match in self.PROTOTYPE_RE.finditer(contents):
|
||||
if (
|
||||
set([match.group(2).strip(), match.group(3).strip()])
|
||||
& reserved_keywords
|
||||
):
|
||||
continue
|
||||
prototypes.append(match)
|
||||
return prototypes
|
||||
|
||||
def _get_total_lines(self, contents):
|
||||
total = 0
|
||||
if contents.endswith("\n"):
|
||||
contents = contents[:-1]
|
||||
for line in contents.split("\n")[::-1]:
|
||||
linenum = self._parse_preproc_line_num(line)
|
||||
if linenum is not None:
|
||||
return total + linenum
|
||||
total += 1
|
||||
return total
|
||||
|
||||
def append_prototypes(self, contents):
|
||||
prototypes = self._parse_prototypes(contents) or []
|
||||
|
||||
# skip already declared prototypes
|
||||
declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";")
|
||||
prototypes = [m for m in prototypes if m.group(1).strip() not in declared]
|
||||
|
||||
if not prototypes:
|
||||
return contents
|
||||
|
||||
prototype_names = set(m.group(3).strip() for m in prototypes)
|
||||
split_pos = prototypes[0].start()
|
||||
match_ptrs = re.search(
|
||||
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
|
||||
contents[:split_pos],
|
||||
re.M,
|
||||
)
|
||||
if match_ptrs:
|
||||
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
|
||||
|
||||
result = []
|
||||
result.append(contents[:split_pos].strip())
|
||||
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
|
||||
result.append(
|
||||
'#line %d "%s"'
|
||||
% (
|
||||
self._get_total_lines(contents[:split_pos]),
|
||||
self._main_ino.replace("\\", "/"),
|
||||
)
|
||||
)
|
||||
result.append(contents[split_pos:].strip())
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def FindInoNodes(env):
|
||||
src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR"))
|
||||
return env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
|
||||
os.path.join(src_dir, "*.pde")
|
||||
)
|
||||
|
||||
|
||||
def ConvertInoToCpp(env):
|
||||
ino_nodes = env.FindInoNodes()
|
||||
if not ino_nodes:
|
||||
return
|
||||
c = InoToCPPConverter(env)
|
||||
out_file = c.convert(ino_nodes)
|
||||
|
||||
atexit.register(_delete_file, out_file)
|
||||
|
||||
|
||||
def _delete_file(path):
|
||||
try:
|
||||
if os.path.isfile(path):
|
||||
os.remove(path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(FindInoNodes)
|
||||
env.AddMethod(ConvertInoToCpp)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
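A minimal sketch (not part of the diff) of how the converter above is typically driven; the Arduino framework handling in BuildFrameworks() in piobuild.py calls the same method before building:

Import("env")

# Collects src/*.ino and src/*.pde, prepends `#include <Arduino.h>`,
# appends forward prototypes, and writes <main sketch>.ino.cpp next to it.
env.ConvertInoToCpp()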
|
@ -12,43 +12,40 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import glob
|
||||
import os
|
||||
from glob import glob
|
||||
|
||||
from SCons.Defaults import processDefines # pylint: disable=import-error
|
||||
import SCons.Defaults # pylint: disable=import-error
|
||||
import SCons.Subst # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import glob_escape
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.proc import exec_command, where_is_program
|
||||
|
||||
|
||||
def _dump_includes(env):
|
||||
includes = {}
|
||||
def IsIntegrationDump(_):
|
||||
return set(["__idedata", "__metadata"]) & set(COMMAND_LINE_TARGETS)
|
||||
|
||||
includes["build"] = [
|
||||
env.subst("$PROJECT_INCLUDE_DIR"),
|
||||
env.subst("$PROJECT_SRC_DIR"),
|
||||
]
|
||||
includes["build"].extend(
|
||||
[os.path.realpath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
|
||||
def DumpIntegrationIncludes(env):
|
||||
result = dict(build=[], compatlib=[], toolchain=[])
|
||||
|
||||
# `env`(project) CPPPATH
|
||||
result["build"].extend(
|
||||
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
)
|
||||
|
||||
# installed libs
|
||||
includes["compatlib"] = []
|
||||
for lb in env.GetLibBuilders():
|
||||
includes["compatlib"].extend(
|
||||
[os.path.realpath(inc) for inc in lb.get_include_dirs()]
|
||||
result["compatlib"].extend(
|
||||
[os.path.abspath(inc) for inc in lb.get_include_dirs()]
|
||||
)
|
||||
|
||||
# includes from toolchains
|
||||
p = env.PioPlatform()
|
||||
includes["toolchain"] = []
|
||||
for pkg in p.get_installed_packages():
|
||||
for pkg in p.get_installed_packages(with_optional=False):
|
||||
if p.get_package_type(pkg.metadata.name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = glob_escape(pkg.path)
|
||||
toolchain_dir = glob.escape(pkg.path)
|
||||
toolchain_incglobs = [
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
|
||||
@ -56,17 +53,12 @@ def _dump_includes(env):
|
||||
os.path.join(toolchain_dir, "*", "include*"),
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
includes["toolchain"].extend([os.path.realpath(inc) for inc in glob(g)])
|
||||
result["toolchain"].extend([os.path.abspath(inc) for inc in glob.glob(g)])
|
||||
|
||||
includes["unity"] = []
|
||||
unity_dir = get_core_package_dir("tool-unity")
|
||||
if unity_dir:
|
||||
includes["unity"].append(unity_dir)
|
||||
|
||||
return includes
|
||||
return result
|
||||
|
||||
|
||||
def _get_gcc_defines(env):
|
||||
def get_gcc_defines(env):
|
||||
items = []
|
||||
try:
|
||||
sysenv = os.environ.copy()
|
||||
@ -89,13 +81,13 @@ def _get_gcc_defines(env):
|
||||
return items
|
||||
|
||||
|
||||
def _dump_defines(env):
|
||||
def dump_defines(env):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in processDefines(env.get("CPPDEFINES", [])):
|
||||
for item in SCons.Defaults.processDefines(env.get("CPPDEFINES", [])):
|
||||
item = item.strip()
|
||||
if item:
|
||||
defines.append(env.subst(item).replace("\\", ""))
|
||||
defines.append(env.subst(item).replace('\\"', '"'))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env["PIOPLATFORM"] == "atmelavr":
|
||||
@ -114,15 +106,15 @@ def _dump_defines(env):
|
||||
|
||||
# built-in GCC marcos
|
||||
# if env.GetCompilerType() == "gcc":
|
||||
# defines.extend(_get_gcc_defines(env))
|
||||
# defines.extend(get_gcc_defines(env))
|
||||
|
||||
return defines
|
||||
|
||||
|
||||
def _get_svd_path(env):
|
||||
def dump_svd_path(env):
|
||||
svd_path = env.GetProjectOption("debug_svd_path")
|
||||
if svd_path:
|
||||
return os.path.realpath(svd_path)
|
||||
return os.path.abspath(svd_path)
|
||||
|
||||
if "BOARD" not in env:
|
||||
return None
|
||||
@ -137,60 +129,49 @@ def _get_svd_path(env):
|
||||
# default file from ./platform/misc/svd folder
|
||||
p = env.PioPlatform()
|
||||
if os.path.isfile(os.path.join(p.get_dir(), "misc", "svd", svd_path)):
|
||||
return os.path.realpath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return os.path.abspath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return None
|
||||
|
||||
|
||||
def _escape_build_flag(flags):
|
||||
return [flag if " " not in flag else '"%s"' % flag for flag in flags]
|
||||
def _split_flags_string(env, s):
|
||||
args = env.subst_list(s, SCons.Subst.SUBST_CMD)[0]
|
||||
return [str(arg) for arg in args]
|
||||
|
||||
|
||||
def DumpIDEData(env, globalenv):
|
||||
""" env here is `projenv`"""
|
||||
|
||||
env["__escape_build_flag"] = _escape_build_flag
|
||||
|
||||
LINTCCOM = (
|
||||
"${__escape_build_flag(CFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
|
||||
)
|
||||
LINTCXXCOM = (
|
||||
"${__escape_build_flag(CXXFLAGS)} ${__escape_build_flag(CCFLAGS)} $CPPFLAGS"
|
||||
)
|
||||
|
||||
def DumpIntegrationData(*args):
|
||||
projenv, globalenv = args[0:2] # pylint: disable=unbalanced-tuple-unpacking
|
||||
data = {
|
||||
"env_name": env["PIOENV"],
|
||||
"libsource_dirs": [env.subst(l) for l in env.GetLibSourceDirs()],
|
||||
"defines": _dump_defines(env),
|
||||
"includes": _dump_includes(env),
|
||||
"cc_path": where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
"cxx_path": where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
"gdb_path": where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
"prog_path": env.subst("$PROG_PATH"),
|
||||
"svd_path": _get_svd_path(env),
|
||||
"compiler_type": env.GetCompilerType(),
|
||||
"build_type": globalenv["BUILD_TYPE"],
|
||||
"env_name": globalenv["PIOENV"],
|
||||
"libsource_dirs": [
|
||||
globalenv.subst(item) for item in globalenv.GetLibSourceDirs()
|
||||
],
|
||||
"defines": dump_defines(projenv),
|
||||
"includes": projenv.DumpIntegrationIncludes(),
|
||||
"cc_flags": _split_flags_string(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _split_flags_string(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cc_path": where_is_program(
|
||||
globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"cxx_path": where_is_program(
|
||||
globalenv.subst("$CXX"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"gdb_path": where_is_program(
|
||||
globalenv.subst("$GDB"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"prog_path": globalenv.subst("$PROGPATH"),
|
||||
"svd_path": dump_svd_path(globalenv),
|
||||
"compiler_type": globalenv.GetCompilerType(),
|
||||
"targets": globalenv.DumpTargets(),
|
||||
"extra": dict(
|
||||
flash_images=[
|
||||
{"offset": item[0], "path": env.subst(item[1])}
|
||||
for item in env.get("FLASH_EXTRA_IMAGES", [])
|
||||
{"offset": item[0], "path": globalenv.subst(item[1])}
|
||||
for item in globalenv.get("FLASH_EXTRA_IMAGES", [])
|
||||
]
|
||||
),
|
||||
}
|
||||
data["extra"].update(env.get("IDE_EXTRA_DATA", {}))
|
||||
|
||||
env_ = env.Clone()
|
||||
# https://github.com/platformio/platformio-atom-ide/issues/34
|
||||
_new_defines = []
|
||||
for item in processDefines(env_.get("CPPDEFINES", [])):
|
||||
item = item.replace('\\"', '"')
|
||||
if " " in item:
|
||||
_new_defines.append(item.replace(" ", "\\\\ "))
|
||||
else:
|
||||
_new_defines.append(item)
|
||||
env_.Replace(CPPDEFINES=_new_defines)
|
||||
|
||||
data.update({"cc_flags": env_.subst(LINTCCOM), "cxx_flags": env_.subst(LINTCXXCOM)})
|
||||
|
||||
for key in ("IDE_EXTRA_DATA", "INTEGRATION_EXTRA_DATA"):
|
||||
data["extra"].update(globalenv.get(key, {}))
|
||||
return data
|
||||
|
||||
|
||||
@ -199,5 +180,9 @@ def exists(_):
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(DumpIDEData)
|
||||
env["IDE_EXTRA_DATA"] = {} # legacy support
|
||||
env["INTEGRATION_EXTRA_DATA"] = {}
|
||||
env.AddMethod(IsIntegrationDump)
|
||||
env.AddMethod(DumpIntegrationIncludes)
|
||||
env.AddMethod(DumpIntegrationData)
|
||||
return env
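The dictionary assembled by DumpIntegrationData above is what IDE integrations consume (via targets such as __idedata); a trimmed, hypothetical example of its shape, with made-up paths and values:

{
    "build_type": "debug",
    "env_name": "uno",
    "libsource_dirs": ["/project/lib", "/project/.pio/libdeps/uno"],
    "defines": ["ARDUINO_ARCH_AVR", "F_CPU=16000000L"],
    "includes": {
        "build": ["/project/include", "/project/src"],
        "compatlib": ["/project/.pio/libdeps/uno/Servo/src"],
        "toolchain": ["/toolchain-atmelavr/avr/include"],
    },
    "cc_flags": ["-Os", "-Wall"],
    "cxx_flags": ["-Os", "-fno-exceptions"],
    "cc_path": "/toolchain-atmelavr/bin/avr-gcc",
    "cxx_path": "/toolchain-atmelavr/bin/avr-g++",
    "gdb_path": "/toolchain-atmelavr/bin/avr-gdb",
    "prog_path": "/project/.pio/build/uno/firmware.elf",
    "svd_path": None,
    "compiler_type": "gcc",
    "targets": [],
    "extra": {"flash_images": []},
}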
|
@ -12,11 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=no-self-use, unused-argument, too-many-lines
|
||||
# pylint: disable=too-many-instance-attributes, too-many-public-methods
|
||||
# pylint: disable=assignment-from-no-return
|
||||
|
||||
from __future__ import absolute_import
|
||||
# pylint: disable=assignment-from-no-return, unused-argument, too-many-lines
|
||||
|
||||
import hashlib
|
||||
import io
|
||||
@ -27,24 +24,26 @@ import sys
|
||||
import click
|
||||
import SCons.Scanner # pylint: disable=import-error
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.builder.tools import platformio as piotool
|
||||
from platformio.clients.http import InternetIsOffline
|
||||
from platformio.compat import WINDOWS, hashlib_encode_data, string_types
|
||||
from platformio.package.exception import UnknownPackageError
|
||||
from platformio import exception, fs
|
||||
from platformio.builder.tools import piobuild
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data, string_types
|
||||
from platformio.http import HttpClientApiError, InternetConnectionError
|
||||
from platformio.package.exception import (
|
||||
MissingPackageManifestError,
|
||||
UnknownPackageError,
|
||||
)
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.manifest.parser import (
|
||||
ManifestParserError,
|
||||
ManifestParserFactory,
|
||||
)
|
||||
from platformio.package.meta import PackageItem
|
||||
from platformio.package.meta import PackageCompatibility, PackageItem, PackageSpec
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
class LibBuilderFactory(object):
|
||||
class LibBuilderFactory:
|
||||
@staticmethod
|
||||
def new(env, path, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0))):
|
||||
clsname = "UnknownLibBuilder"
|
||||
@ -54,11 +53,21 @@ class LibBuilderFactory(object):
|
||||
used_frameworks = LibBuilderFactory.get_used_frameworks(env, path)
|
||||
common_frameworks = set(env.get("PIOFRAMEWORK", [])) & set(used_frameworks)
|
||||
if common_frameworks:
|
||||
clsname = "%sLibBuilder" % list(common_frameworks)[0].title()
|
||||
clsname = "%sLibBuilder" % list(common_frameworks)[0].capitalize()
|
||||
elif used_frameworks:
|
||||
clsname = "%sLibBuilder" % used_frameworks[0].title()
|
||||
clsname = "%sLibBuilder" % used_frameworks[0].capitalize()
|
||||
|
||||
obj = globals()[clsname](env, path, verbose=verbose)
|
||||
|
||||
# Handle PlatformIOLibBuilder.manifest.build.builder
|
||||
# pylint: disable=protected-access
|
||||
if isinstance(obj, PlatformIOLibBuilder) and obj._manifest.get("build", {}).get(
|
||||
"builder"
|
||||
):
|
||||
obj = globals()[obj._manifest.get("build", {}).get("builder")](
|
||||
env, path, verbose=verbose
|
||||
)
|
||||
|
||||
obj = getattr(sys.modules[__name__], clsname)(env, path, verbose=verbose)
|
||||
assert isinstance(obj, LibBuilderBase)
|
||||
return obj
|
||||
|
||||
@ -83,10 +92,12 @@ class LibBuilderFactory(object):
|
||||
return ["mbed"]
|
||||
for fname in files:
|
||||
if not fs.path_endswith_ext(
|
||||
fname, piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT
|
||||
fname, piobuild.SRC_BUILD_EXT + piobuild.SRC_HEADER_EXT
|
||||
):
|
||||
continue
|
||||
with io.open(os.path.join(root, fname), errors="ignore") as fp:
|
||||
with io.open(
|
||||
os.path.join(root, fname), encoding="utf8", errors="ignore"
|
||||
) as fp:
|
||||
content = fp.read()
|
||||
if not content:
|
||||
continue
|
||||
@ -97,8 +108,7 @@ class LibBuilderFactory(object):
|
||||
return []
|
||||
|
||||
|
||||
class LibBuilderBase(object):
|
||||
|
||||
class LibBuilderBase:
|
||||
CLASSIC_SCANNER = SCons.Scanner.C.CScanner()
|
||||
CCONDITIONAL_SCANNER = SCons.Scanner.C.CConditionalScanner()
|
||||
# Max depth of nested includes:
|
||||
@ -113,7 +123,7 @@ class LibBuilderBase(object):
|
||||
def __init__(self, env, path, manifest=None, verbose=False):
|
||||
self.env = env.Clone()
|
||||
self.envorigin = env.Clone()
|
||||
self.path = os.path.realpath(env.subst(path))
|
||||
self.path = os.path.abspath(env.subst(path))
|
||||
self.verbose = verbose
|
||||
|
||||
try:
|
||||
@ -124,11 +134,17 @@ class LibBuilderBase(object):
|
||||
)
|
||||
self._manifest = {}
|
||||
|
||||
self._is_dependent = False
|
||||
self._is_built = False
|
||||
self._depbuilders = list()
|
||||
self._circular_deps = list()
|
||||
self._processed_files = list()
|
||||
self.is_dependent = False
|
||||
self.is_built = False
|
||||
self.depbuilders = []
|
||||
|
||||
self._deps_are_processed = False
|
||||
self._circular_deps = []
|
||||
self._processed_search_files = []
|
||||
|
||||
# pass a macro to the projenv + libs
|
||||
if "test" in env["BUILD_TYPE"]:
|
||||
self.env.Append(CPPDEFINES=["PIO_UNIT_TESTING"])
|
||||
|
||||
# reset source filter, could be overridden with extra script
|
||||
self.env["SRC_FILTER"] = ""
|
||||
@ -139,15 +155,27 @@ class LibBuilderBase(object):
|
||||
def __repr__(self):
|
||||
return "%s(%r)" % (self.__class__, self.path)
|
||||
|
||||
def __contains__(self, path):
|
||||
p1 = self.path
|
||||
p2 = path
|
||||
if WINDOWS:
|
||||
p1 = p1.lower()
|
||||
p2 = p2.lower()
|
||||
if p1 == p2:
|
||||
def __contains__(self, child_path):
|
||||
return self.is_common_builder(self.path, child_path)
|
||||
|
||||
def is_common_builder(self, root_path, child_path):
|
||||
if IS_WINDOWS:
|
||||
root_path = root_path.lower()
|
||||
child_path = child_path.lower()
|
||||
if root_path == child_path:
|
||||
return True
|
||||
return os.path.commonprefix((p1 + os.path.sep, p2)) == p1 + os.path.sep
|
||||
if (
|
||||
os.path.commonprefix([root_path + os.path.sep, child_path])
|
||||
== root_path + os.path.sep
|
||||
):
|
||||
return True
|
||||
# try to resolve paths
|
||||
root_path = os.path.realpath(root_path)
|
||||
child_path = os.path.realpath(child_path)
|
||||
return (
|
||||
os.path.commonprefix([root_path + os.path.sep, child_path])
|
||||
== root_path + os.path.sep
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@ -157,13 +185,18 @@ class LibBuilderBase(object):
|
||||
def version(self):
|
||||
return self._manifest.get("version")
|
||||
|
||||
@property
|
||||
def dependent(self):
|
||||
"""Backward compatibility with ESP-IDF"""
|
||||
return self.is_dependent
|
||||
|
||||
@property
|
||||
def dependencies(self):
|
||||
return self._manifest.get("dependencies")
|
||||
|
||||
@property
|
||||
def src_filter(self):
|
||||
return piotool.SRC_FILTER_DEFAULT + [
|
||||
return piobuild.SRC_FILTER_DEFAULT + [
|
||||
"-<example%s>" % os.sep,
|
||||
"-<examples%s>" % os.sep,
|
||||
"-<test%s>" % os.sep,
|
||||
@ -172,19 +205,19 @@ class LibBuilderBase(object):
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if not all(
|
||||
os.path.isdir(os.path.join(self.path, d)) for d in ("include", "src")
|
||||
):
|
||||
return None
|
||||
return os.path.join(self.path, "include")
|
||||
for name in ("include", "Include"):
|
||||
d = os.path.join(self.path, name)
|
||||
if os.path.isdir(d):
|
||||
return d
|
||||
return None
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
return (
|
||||
os.path.join(self.path, "src")
|
||||
if os.path.isdir(os.path.join(self.path, "src"))
|
||||
else self.path
|
||||
)
|
||||
for name in ("src", "Src"):
|
||||
d = os.path.join(self.path, name)
|
||||
if os.path.isdir(d):
|
||||
return d
|
||||
return self.path
|
||||
|
||||
def get_include_dirs(self):
|
||||
items = []
|
||||
@ -213,18 +246,6 @@ class LibBuilderBase(object):
|
||||
def extra_script(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def depbuilders(self):
|
||||
return self._depbuilders
|
||||
|
||||
@property
|
||||
def dependent(self):
|
||||
return self._is_dependent
|
||||
|
||||
@property
|
||||
def is_built(self):
|
||||
return self._is_built
|
||||
|
||||
@property
|
||||
def lib_archive(self):
|
||||
return self.env.GetProjectOption("lib_archive")
|
||||
@ -276,48 +297,62 @@ class LibBuilderBase(object):
|
||||
with fs.cd(self.path):
|
||||
self.env.ProcessFlags(self.build_flags)
|
||||
if self.extra_script:
|
||||
self.env.SConscriptChdir(1)
|
||||
self.env.SConscriptChdir(True)
|
||||
self.env.SConscript(
|
||||
os.path.realpath(self.extra_script),
|
||||
os.path.abspath(self.extra_script),
|
||||
exports={"env": self.env, "pio_lib_builder": self},
|
||||
)
|
||||
self.env.SConscriptChdir(False)
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
|
||||
def process_dependencies(self):
|
||||
if not self.dependencies:
|
||||
if not self.dependencies or self._deps_are_processed:
|
||||
return
|
||||
for item in self.dependencies:
|
||||
self._deps_are_processed = True
|
||||
for dependency in self.dependencies:
|
||||
found = False
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if item["name"] != lb.name:
|
||||
if not lb.is_dependency_compatible(dependency):
|
||||
continue
|
||||
found = True
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
self.depend_on(lb)
|
||||
break
|
||||
|
||||
if not found and self.verbose:
|
||||
sys.stderr.write(
|
||||
"Warning: Ignored `%s` dependency for `%s` "
|
||||
"library\n" % (item["name"], self.name)
|
||||
"library\n" % (dependency["name"], self.name)
|
||||
)
|
||||
|
||||
def get_search_files(self):
|
||||
items = [
|
||||
os.path.join(self.src_dir, item)
|
||||
for item in self.env.MatchSourceFiles(self.src_dir, self.src_filter)
|
||||
]
|
||||
include_dir = self.include_dir
|
||||
if include_dir:
|
||||
items.extend(
|
||||
[
|
||||
os.path.join(include_dir, item)
|
||||
for item in self.env.MatchSourceFiles(include_dir)
|
||||
]
|
||||
)
|
||||
return items
|
||||
def is_dependency_compatible(self, dependency):
|
||||
pkg = PackageItem(self.path)
|
||||
qualifiers = {"name": self.name, "version": self.version}
|
||||
if pkg.metadata:
|
||||
qualifiers = {"name": pkg.metadata.name, "version": pkg.metadata.version}
|
||||
if pkg.metadata.spec and pkg.metadata.spec.owner:
|
||||
qualifiers["owner"] = pkg.metadata.spec.owner
|
||||
dep_qualifiers = {
|
||||
k: v for k, v in dependency.items() if k in ("owner", "name", "version")
|
||||
}
|
||||
if (
|
||||
"version" in dep_qualifiers
|
||||
and not PackageSpec(dep_qualifiers["version"]).requirements
|
||||
):
|
||||
del dep_qualifiers["version"]
|
||||
return PackageCompatibility.from_dependency(dep_qualifiers).is_compatible(
|
||||
PackageCompatibility(**qualifiers)
|
||||
)
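is_dependency_compatible() boils a lib_deps entry down to owner/name/version qualifiers and defers to PackageCompatibility, the same helper imported at the top of this file; a hedged sketch of the comparison it performs (names and versions are illustrative):

from platformio.package.meta import PackageCompatibility

dependency = {"owner": "bblanchon", "name": "ArduinoJson"}
library = PackageCompatibility(
    owner="bblanchon", name="ArduinoJson", version="6.19.4"
)
print(PackageCompatibility.from_dependency(dependency).is_compatible(library))
# -> True when none of the declared qualifiers conflict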
|
||||
|
||||
def _get_found_includes( # pylint: disable=too-many-branches
|
||||
def get_search_files(self):
|
||||
return [
|
||||
os.path.join(self.src_dir, item)
|
||||
for item in self.env.MatchSourceFiles(
|
||||
self.src_dir, self.src_filter, piobuild.SRC_BUILD_EXT
|
||||
)
|
||||
]
|
||||
|
||||
def get_implicit_includes( # pylint: disable=too-many-branches
|
||||
self, search_files=None
|
||||
):
|
||||
# all include directories
|
||||
@ -325,7 +360,7 @@ class LibBuilderBase(object):
|
||||
LibBuilderBase._INCLUDE_DIRS_CACHE = [
|
||||
self.env.Dir(d)
|
||||
for d in ProjectAsLibBuilder(
|
||||
self.envorigin, "$PROJECT_DIR"
|
||||
self.envorigin, "$PROJECT_DIR", export_projenv=False
|
||||
).get_include_dirs()
|
||||
]
|
||||
for lb in self.env.GetLibBuilders():
|
||||
@ -338,57 +373,82 @@ class LibBuilderBase(object):
|
||||
include_dirs.extend(LibBuilderBase._INCLUDE_DIRS_CACHE)
|
||||
|
||||
result = []
|
||||
for path in search_files or []:
|
||||
if path in self._processed_files:
|
||||
search_files = search_files or []
|
||||
while search_files:
|
||||
node = self.env.File(search_files.pop(0))
|
||||
if node.get_abspath() in self._processed_search_files:
|
||||
continue
|
||||
self._processed_files.append(path)
|
||||
self._processed_search_files.append(node.get_abspath())
|
||||
|
||||
try:
|
||||
assert "+" in self.lib_ldf_mode
|
||||
candidates = LibBuilderBase.CCONDITIONAL_SCANNER(
|
||||
self.env.File(path),
|
||||
node,
|
||||
self.env,
|
||||
tuple(include_dirs),
|
||||
depth=self.CCONDITIONAL_SCANNER_DEPTH,
|
||||
)
|
||||
# mark candidates already processed via Conditional Scanner
|
||||
self._processed_files.extend(
|
||||
[
|
||||
c.get_abspath()
|
||||
for c in candidates
|
||||
if c.get_abspath() not in self._processed_files
|
||||
]
|
||||
)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
if self.verbose and "+" in self.lib_ldf_mode:
|
||||
sys.stderr.write(
|
||||
"Warning! Classic Pre Processor is used for `%s`, "
|
||||
"advanced has failed with `%s`\n" % (path, e)
|
||||
"advanced has failed with `%s`\n" % (node.get_abspath(), exc)
|
||||
)
|
||||
candidates = LibBuilderBase.CLASSIC_SCANNER(
|
||||
self.env.File(path), self.env, tuple(include_dirs)
|
||||
node, self.env, tuple(include_dirs)
|
||||
)
|
||||
|
||||
# print(path, [c.get_abspath() for c in candidates])
|
||||
# print(node.get_abspath(), [c.get_abspath() for c in candidates])
|
||||
for item in candidates:
|
||||
item_path = item.get_abspath()
|
||||
# process internal files recursively
|
||||
if (
|
||||
item_path not in self._processed_search_files
|
||||
and item_path not in search_files
|
||||
and item_path in self
|
||||
):
|
||||
search_files.append(item_path)
|
||||
if item not in result:
|
||||
result.append(item)
|
||||
if not self.PARSE_SRC_BY_H_NAME:
|
||||
continue
|
||||
_h_path = item.get_abspath()
|
||||
if not fs.path_endswith_ext(_h_path, piotool.SRC_HEADER_EXT):
|
||||
if not fs.path_endswith_ext(item_path, piobuild.SRC_HEADER_EXT):
|
||||
continue
|
||||
_f_part = _h_path[: _h_path.rindex(".")]
|
||||
for ext in piotool.SRC_C_EXT + piotool.SRC_CXX_EXT:
|
||||
if not os.path.isfile("%s.%s" % (_f_part, ext)):
|
||||
item_fname = item_path[: item_path.rindex(".")]
|
||||
for ext in piobuild.SRC_C_EXT + piobuild.SRC_CXX_EXT:
|
||||
if not os.path.isfile("%s.%s" % (item_fname, ext)):
|
||||
continue
|
||||
_c_path = self.env.File("%s.%s" % (_f_part, ext))
|
||||
if _c_path not in result:
|
||||
result.append(_c_path)
|
||||
item_c_node = self.env.File("%s.%s" % (item_fname, ext))
|
||||
if item_c_node not in result:
|
||||
result.append(item_c_node)
|
||||
|
||||
return result
|
||||
|
||||
def depend_recursive(self, lb, search_files=None):
|
||||
def search_deps_recursive(self, search_files=None):
|
||||
self.process_dependencies()
|
||||
|
||||
# when LDF is disabled
|
||||
if self.lib_ldf_mode == "off":
|
||||
return
|
||||
|
||||
if self.lib_ldf_mode.startswith("deep"):
|
||||
search_files = self.get_search_files()
|
||||
|
||||
lib_inc_map = {}
|
||||
for inc in self.get_implicit_includes(search_files):
|
||||
inc_path = inc.get_abspath()
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if inc_path in lb:
|
||||
if lb not in lib_inc_map:
|
||||
lib_inc_map[lb] = []
|
||||
lib_inc_map[lb].append(inc_path)
|
||||
break
|
||||
|
||||
for lb, lb_search_files in lib_inc_map.items():
|
||||
self.depend_on(lb, search_files=lb_search_files)
|
||||
|
||||
def depend_on(self, lb, search_files=None, recursive=True):
|
||||
def _already_depends(_lb):
|
||||
if self in _lb.depbuilders:
|
||||
return True
|
||||
@ -406,65 +466,66 @@ class LibBuilderBase(object):
|
||||
"between `%s` and `%s`\n" % (self.path, lb.path)
|
||||
)
|
||||
self._circular_deps.append(lb)
|
||||
elif lb not in self._depbuilders:
|
||||
self._depbuilders.append(lb)
|
||||
elif lb not in self.depbuilders:
|
||||
self.depbuilders.append(lb)
|
||||
lb.is_dependent = True
|
||||
LibBuilderBase._INCLUDE_DIRS_CACHE = None
|
||||
lb.search_deps_recursive(search_files)
|
||||
|
||||
def search_deps_recursive(self, search_files=None):
|
||||
if not self._is_dependent:
|
||||
self._is_dependent = True
|
||||
self.process_dependencies()
|
||||
|
||||
if self.lib_ldf_mode.startswith("deep"):
|
||||
search_files = self.get_search_files()
|
||||
|
||||
# when LDF is disabled
|
||||
if self.lib_ldf_mode == "off":
|
||||
return
|
||||
|
||||
lib_inc_map = {}
|
||||
for inc in self._get_found_includes(search_files):
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if inc.get_abspath() in lb:
|
||||
if lb not in lib_inc_map:
|
||||
lib_inc_map[lb] = []
|
||||
lib_inc_map[lb].append(inc.get_abspath())
|
||||
break
|
||||
|
||||
for lb, lb_search_files in lib_inc_map.items():
|
||||
self.depend_recursive(lb, lb_search_files)
|
||||
if recursive:
|
||||
lb.search_deps_recursive(search_files)
|
||||
|
||||
def build(self):
|
||||
libs = []
|
||||
for lb in self._depbuilders:
|
||||
shared_scopes = ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS")
|
||||
for lb in self.depbuilders:
|
||||
libs.extend(lb.build())
|
||||
# copy shared information to self env
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.PrependUnique(**{key: lb.env.get(key)})
|
||||
self.env.PrependUnique(
|
||||
**{
|
||||
scope: lb.env.get(scope)
|
||||
for scope in shared_scopes
|
||||
if lb.env.get(scope)
|
||||
}
|
||||
)
|
||||
|
||||
for lb in self._circular_deps:
|
||||
self.env.PrependUnique(CPPPATH=lb.get_include_dirs())
|
||||
|
||||
if self._is_built:
|
||||
if self.is_built:
|
||||
return libs
|
||||
self._is_built = True
|
||||
self.is_built = True
|
||||
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
self.env.ProcessCompileDbToolchainOption()
|
||||
|
||||
if self.lib_ldf_mode == "off":
|
||||
for lb in self.env.GetLibBuilders():
|
||||
if self == lb or not lb.is_built:
|
||||
continue
|
||||
for key in ("CPPPATH", "LIBPATH", "LIBS", "LINKFLAGS"):
|
||||
self.env.PrependUnique(**{key: lb.env.get(key)})
|
||||
self.env.PrependUnique(
|
||||
**{
|
||||
scope: lb.env.get(scope)
|
||||
for scope in shared_scopes
|
||||
if lb.env.get(scope)
|
||||
}
|
||||
)
|
||||
|
||||
if self.lib_archive:
|
||||
libs.append(
|
||||
self.env.BuildLibrary(self.build_dir, self.src_dir, self.src_filter)
|
||||
do_not_archive = not self.lib_archive
|
||||
if not do_not_archive:
|
||||
nodes = self.env.CollectBuildFiles(
|
||||
self.build_dir, self.src_dir, self.src_filter
|
||||
)
|
||||
else:
|
||||
if nodes:
|
||||
libs.append(
|
||||
self.env.BuildLibrary(
|
||||
self.build_dir, self.src_dir, self.src_filter, nodes
|
||||
)
|
||||
)
|
||||
else:
|
||||
do_not_archive = True
|
||||
if do_not_archive:
|
||||
self.env.BuildSources(self.build_dir, self.src_dir, self.src_filter)
|
||||
|
||||
return libs
|
||||
|
||||
|
||||
@ -479,8 +540,16 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
return {}
|
||||
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if not all(
|
||||
os.path.isdir(os.path.join(self.path, d)) for d in ("include", "src")
|
||||
):
|
||||
return None
|
||||
return os.path.join(self.path, "include")
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
include_dirs = super().get_include_dirs()
|
||||
if os.path.isdir(os.path.join(self.path, "src")):
|
||||
return include_dirs
|
||||
if os.path.isdir(os.path.join(self.path, "utility")):
|
||||
@ -509,7 +578,7 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
|
||||
src_filter = []
|
||||
is_utility = os.path.isdir(os.path.join(self.path, "utility"))
|
||||
for ext in piotool.SRC_BUILD_EXT + piotool.SRC_HEADER_EXT:
|
||||
for ext in piobuild.SRC_BUILD_EXT + piobuild.SRC_HEADER_EXT:
|
||||
# arduino ide ignores files with .asm or .ASM extensions
|
||||
if ext.lower() == "asm":
|
||||
continue
|
||||
@ -540,10 +609,32 @@ class ArduinoLibBuilder(LibBuilderBase):
|
||||
return "chain+"
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, ["arduino", "energia"])
|
||||
return PackageCompatibility(frameworks=frameworks).is_compatible(
|
||||
PackageCompatibility(frameworks=["arduino", "energia"])
|
||||
)
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
|
||||
return PackageCompatibility(platforms=platforms).is_compatible(
|
||||
PackageCompatibility(platforms=self._manifest.get("platforms"))
|
||||
)
|
||||
|
||||
@property
|
||||
def build_flags(self):
|
||||
ldflags = [
|
||||
LibBuilderBase.build_flags.fget(self), # pylint: disable=no-member
|
||||
self._manifest.get("ldflags"),
|
||||
]
|
||||
if self._manifest.get("precompiled") in ("true", "full"):
|
||||
# add to LDPATH {build.mcu} folder
|
||||
board_config = self.env.BoardConfig()
|
||||
for key in ("build.mcu", "build.cpu"):
|
||||
libpath = os.path.join(self.src_dir, board_config.get(key, ""))
|
||||
if not os.path.isdir(libpath):
|
||||
continue
|
||||
self.env.PrependUnique(LIBPATH=libpath)
|
||||
break
|
||||
ldflags = [flag for flag in ldflags if flag] # remove empty
|
||||
return " ".join(ldflags) if ldflags else None
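The precompiled/ldflags branches above read fields from an Arduino library.properties manifest; a hypothetical parsed manifest that would exercise them (values are made up):

manifest = {
    "name": "SomeRadioDriver",
    "precompiled": "true",        # or "full"
    "ldflags": "-lsomeradiodriver",
}
# With the board option build.mcu (or build.cpu) naming a subfolder of src/,
# that folder is prepended to LIBPATH so the prebuilt archive is found.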
|
||||
|
||||
|
||||
class MbedLibBuilder(LibBuilderBase):
|
||||
@ -553,12 +644,6 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
return {}
|
||||
return ManifestParserFactory.new_from_file(manifest_path).as_dict()
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
if os.path.isdir(os.path.join(self.path, "include")):
|
||||
return os.path.join(self.path, "include")
|
||||
return None
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if os.path.isdir(os.path.join(self.path, "source")):
|
||||
@ -566,7 +651,7 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
return LibBuilderBase.src_dir.fget(self) # pylint: disable=no-member
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
include_dirs = super().get_include_dirs()
|
||||
if self.path not in include_dirs:
|
||||
include_dirs.append(self.path)
|
||||
|
||||
@ -586,11 +671,13 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
return include_dirs
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, ["mbed"])
|
||||
return PackageCompatibility(frameworks=frameworks).is_compatible(
|
||||
PackageCompatibility(frameworks=["mbed"])
|
||||
)
|
||||
|
||||
def process_extra_options(self):
|
||||
self._process_mbed_lib_confs()
|
||||
return super(MbedLibBuilder, self).process_extra_options()
|
||||
return super().process_extra_options()
|
||||
|
||||
def _process_mbed_lib_confs(self):
|
||||
mbed_lib_paths = [
|
||||
@ -671,7 +758,7 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
|
||||
def _mbed_conf_append_macros(self, mbed_config_path, macros):
|
||||
lines = []
|
||||
with open(mbed_config_path) as fp:
|
||||
with open(mbed_config_path, encoding="utf8") as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if line == "#endif":
|
||||
@ -690,7 +777,7 @@ class MbedLibBuilder(LibBuilderBase):
|
||||
if len(tokens) < 2 or tokens[1] not in macros:
|
||||
lines.append(line)
|
||||
lines.append("")
|
||||
with open(mbed_config_path, "w") as fp:
|
||||
with open(mbed_config_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(lines))
|
||||
|
||||
|
||||
@ -708,14 +795,34 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
def include_dir(self):
|
||||
if "includeDir" in self._manifest.get("build", {}):
|
||||
with fs.cd(self.path):
|
||||
return os.path.realpath(self._manifest.get("build").get("includeDir"))
|
||||
return os.path.abspath(self._manifest.get("build").get("includeDir"))
|
||||
return LibBuilderBase.include_dir.fget(self) # pylint: disable=no-member
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = super().get_include_dirs()
|
||||
|
||||
# backwards compatibility with PlatformIO 2.0
|
||||
if (
|
||||
"build" not in self._manifest
|
||||
and self._has_arduino_manifest()
|
||||
and not os.path.isdir(os.path.join(self.path, "src"))
|
||||
and os.path.isdir(os.path.join(self.path, "utility"))
|
||||
):
|
||||
include_dirs.append(os.path.join(self.path, "utility"))
|
||||
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in include_dirs and path not in self.envorigin.get(
|
||||
"CPPPATH", []
|
||||
):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
|
||||
return include_dirs
|
||||
|
||||
@property
|
||||
def src_dir(self):
|
||||
if "srcDir" in self._manifest.get("build", {}):
|
||||
with fs.cd(self.path):
|
||||
return os.path.realpath(self._manifest.get("build").get("srcDir"))
|
||||
return os.path.abspath(self._manifest.get("build").get("srcDir"))
|
||||
return LibBuilderBase.src_dir.fget(self) # pylint: disable=no-member
|
||||
|
||||
@property
|
||||
@ -781,36 +888,33 @@ class PlatformIOLibBuilder(LibBuilderBase):
|
||||
)
|
||||
|
||||
def is_platforms_compatible(self, platforms):
|
||||
return util.items_in_list(platforms, self._manifest.get("platforms") or ["*"])
|
||||
return PackageCompatibility(platforms=platforms).is_compatible(
|
||||
PackageCompatibility(platforms=self._manifest.get("platforms"))
|
||||
)
|
||||
|
||||
def is_frameworks_compatible(self, frameworks):
|
||||
return util.items_in_list(frameworks, self._manifest.get("frameworks") or ["*"])
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = LibBuilderBase.get_include_dirs(self)
|
||||
|
||||
# backwards compatibility with PlatformIO 2.0
|
||||
if (
|
||||
"build" not in self._manifest
|
||||
and self._has_arduino_manifest()
|
||||
and not os.path.isdir(os.path.join(self.path, "src"))
|
||||
and os.path.isdir(os.path.join(self.path, "utility"))
|
||||
):
|
||||
include_dirs.append(os.path.join(self.path, "utility"))
|
||||
|
||||
for path in self.env.get("CPPPATH", []):
|
||||
if path not in self.envorigin.get("CPPPATH", []):
|
||||
include_dirs.append(self.env.subst(path))
|
||||
|
||||
return include_dirs
|
||||
return PackageCompatibility(frameworks=frameworks).is_compatible(
|
||||
PackageCompatibility(frameworks=self._manifest.get("frameworks"))
|
||||
)
|
||||
|
||||
|
||||
class ProjectAsLibBuilder(LibBuilderBase):
|
||||
def __init__(self, env, *args, **kwargs):
|
||||
export_projenv = kwargs.get("export_projenv", True)
|
||||
if "export_projenv" in kwargs:
|
||||
del kwargs["export_projenv"]
|
||||
# backup original value, will be reset in base.__init__
|
||||
project_src_filter = env.get("SRC_FILTER")
|
||||
super(ProjectAsLibBuilder, self).__init__(env, *args, **kwargs)
|
||||
super().__init__(env, *args, **kwargs)
|
||||
self.env["SRC_FILTER"] = project_src_filter
|
||||
if export_projenv:
|
||||
env.Export(dict(projenv=self.env))
|
||||
|
||||
def __contains__(self, child_path):
|
||||
for root_path in (self.include_dir, self.src_dir, self.test_dir):
|
||||
if root_path and self.is_common_builder(root_path, child_path):
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def include_dir(self):
|
||||
@ -821,21 +925,18 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
def src_dir(self):
|
||||
return self.env.subst("$PROJECT_SRC_DIR")
|
||||
|
||||
def get_include_dirs(self):
|
||||
include_dirs = []
|
||||
project_include_dir = self.env.subst("$PROJECT_INCLUDE_DIR")
|
||||
if os.path.isdir(project_include_dir):
|
||||
include_dirs.append(project_include_dir)
|
||||
for include_dir in LibBuilderBase.get_include_dirs(self):
|
||||
if include_dir not in include_dirs:
|
||||
include_dirs.append(include_dir)
|
||||
return include_dirs
|
||||
@property
|
||||
def test_dir(self):
|
||||
return self.env.subst("$PROJECT_TEST_DIR")
|
||||
|
||||
def get_search_files(self):
|
||||
items = []
|
||||
build_type = self.env["BUILD_TYPE"]
|
||||
# project files
|
||||
items = LibBuilderBase.get_search_files(self)
|
||||
if "test" not in build_type or self.env.GetProjectOption("test_build_src"):
|
||||
items.extend(super().get_search_files())
|
||||
# test files
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
if "test" in build_type:
|
||||
items.extend(
|
||||
[
|
||||
os.path.join("$PROJECT_TEST_DIR", item)
|
||||
@ -859,13 +960,19 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
# pylint: disable=no-member
|
||||
return self.env.get("SRC_FILTER") or LibBuilderBase.src_filter.fget(self)
|
||||
|
||||
@property
|
||||
def build_flags(self):
|
||||
# pylint: disable=no-member
|
||||
return self.env.get("SRC_BUILD_FLAGS") or LibBuilderBase.build_flags.fget(self)
|
||||
|
||||
@property
|
||||
def dependencies(self):
|
||||
return self.env.GetProjectOption("lib_deps", [])
|
||||
|
||||
def process_extra_options(self):
|
||||
# skip for project, options are already processed
|
||||
pass
|
||||
with fs.cd(self.path):
|
||||
self.env.ProcessFlags(self.build_flags)
|
||||
self.env.ProcessUnFlags(self.build_unflags)
|
||||
|
||||
def install_dependencies(self):
|
||||
def _is_builtin(spec):
|
||||
@ -897,14 +1004,19 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
try:
|
||||
lm.install(spec)
|
||||
did_install = True
|
||||
except (UnknownPackageError, InternetIsOffline) as e:
|
||||
click.secho("Warning! %s" % e, fg="yellow")
|
||||
except (
|
||||
HttpClientApiError,
|
||||
UnknownPackageError,
|
||||
InternetConnectionError,
|
||||
) as exc:
|
||||
click.secho("Warning! %s" % exc, fg="yellow")
|
||||
|
||||
# reset cache
|
||||
if did_install:
|
||||
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=None)
|
||||
|
||||
def process_dependencies(self): # pylint: disable=too-many-branches
|
||||
found_lbs = []
|
||||
for spec in self.dependencies:
|
||||
found = False
|
||||
for storage_dir in self.env.GetLibSourceDirs():
|
||||
@ -918,7 +1030,8 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
if pkg.path != lb.path:
|
||||
continue
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
self.depend_on(lb, recursive=False)
|
||||
found_lbs.append(lb)
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
@ -930,13 +1043,17 @@ class ProjectAsLibBuilder(LibBuilderBase):
|
||||
if lb.name != spec:
|
||||
continue
|
||||
if lb not in self.depbuilders:
|
||||
self.depend_recursive(lb)
|
||||
self.depend_on(lb)
|
||||
found = True
|
||||
break
|
||||
|
||||
# process library dependencies
|
||||
for lb in found_lbs:
|
||||
lb.search_deps_recursive()
|
||||
|
||||
def build(self):
|
||||
self._is_built = True # do not build Project now
|
||||
result = LibBuilderBase.build(self)
|
||||
self.is_built = True # do not build Project now
|
||||
result = super().build()
|
||||
self.env.PrependUnique(CPPPATH=self.get_include_dirs())
|
||||
return result
|
||||
|
||||
@ -961,7 +1078,7 @@ def IsCompatibleLibBuilder(env, lb, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)))
|
||||
sys.stderr.write("Platform incompatible library %s\n" % lb.path)
|
||||
return False
|
||||
if compat_mode in ("soft", "strict") and not lb.is_frameworks_compatible(
|
||||
env.get("PIOFRAMEWORK", [])
|
||||
env.get("PIOFRAMEWORK", "__noframework__")
|
||||
):
|
||||
if verbose:
|
||||
sys.stderr.write("Framework incompatible library %s\n" % lb.path)
|
||||
@ -969,25 +1086,30 @@ def IsCompatibleLibBuilder(env, lb, verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)))
|
||||
return True
|
||||
|
||||
|
||||
def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
if DefaultEnvironment().get("__PIO_LIB_BUILDERS", None) is not None:
|
||||
def GetLibBuilders(_): # pylint: disable=too-many-branches
|
||||
env = DefaultEnvironment()
|
||||
if env.get("__PIO_LIB_BUILDERS", None) is not None:
|
||||
return sorted(
|
||||
DefaultEnvironment()["__PIO_LIB_BUILDERS"],
|
||||
key=lambda lb: 0 if lb.dependent else 1,
|
||||
env["__PIO_LIB_BUILDERS"],
|
||||
key=lambda lb: 0 if lb.is_dependent else 1,
|
||||
)
|
||||
|
||||
DefaultEnvironment().Replace(__PIO_LIB_BUILDERS=[])
|
||||
env.Replace(__PIO_LIB_BUILDERS=[])
|
||||
|
||||
verbose = int(ARGUMENTS.get("PIOVERBOSE", 0))
|
||||
found_incompat = False
|
||||
|
||||
for storage_dir in env.GetLibSourceDirs():
|
||||
storage_dir = os.path.realpath(storage_dir)
|
||||
storage_dir = os.path.abspath(storage_dir)
|
||||
if not os.path.isdir(storage_dir):
|
||||
continue
|
||||
for item in sorted(os.listdir(storage_dir)):
|
||||
lib_dir = os.path.join(storage_dir, item)
|
||||
if item == "__cores__" or not os.path.isdir(lib_dir):
|
||||
if item == "__cores__":
|
||||
continue
|
||||
if LibraryPackageManager.is_symlink(lib_dir):
|
||||
lib_dir, _ = LibraryPackageManager.resolve_symlink(lib_dir)
|
||||
if not lib_dir or not os.path.isdir(lib_dir):
|
||||
continue
|
||||
try:
|
||||
lb = LibBuilderFactory.new(env, lib_dir)
|
||||
@ -998,13 +1120,13 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
)
|
||||
continue
|
||||
if env.IsCompatibleLibBuilder(lb):
|
||||
DefaultEnvironment().Append(__PIO_LIB_BUILDERS=[lb])
|
||||
env.Append(__PIO_LIB_BUILDERS=[lb])
|
||||
else:
|
||||
found_incompat = True
|
||||
|
||||
for lb in env.get("EXTRA_LIB_BUILDERS", []):
|
||||
if env.IsCompatibleLibBuilder(lb):
|
||||
DefaultEnvironment().Append(__PIO_LIB_BUILDERS=[lb])
|
||||
env.Append(__PIO_LIB_BUILDERS=[lb])
|
||||
else:
|
||||
found_incompat = True
|
||||
|
||||
@ -1015,13 +1137,25 @@ def GetLibBuilders(env): # pylint: disable=too-many-branches
|
||||
"ldf-compat-mode\n"
|
||||
)
|
||||
|
||||
return DefaultEnvironment()["__PIO_LIB_BUILDERS"]
|
||||
return env["__PIO_LIB_BUILDERS"]
|
||||
|
||||
|
||||
def ConfigureProjectLibBuilder(env):
|
||||
_pm_storage = {}
|
||||
|
||||
def _get_lib_license(pkg):
|
||||
storage_dir = os.path.dirname(os.path.dirname(pkg.path))
|
||||
if storage_dir not in _pm_storage:
|
||||
_pm_storage[storage_dir] = LibraryPackageManager(storage_dir)
|
||||
try:
|
||||
return (_pm_storage[storage_dir].load_manifest(pkg) or {}).get("license")
|
||||
except MissingPackageManifestError:
|
||||
pass
|
||||
return None
|
||||
|
||||
def _correct_found_libs(lib_builders):
|
||||
# build full dependency graph
|
||||
found_lbs = [lb for lb in lib_builders if lb.dependent]
|
||||
found_lbs = [lb for lb in lib_builders if lb.is_dependent]
|
||||
for lb in lib_builders:
|
||||
if lb in found_lbs:
|
||||
lb.search_deps_recursive(lb.get_search_files())
|
||||
@ -1033,27 +1167,33 @@ def ConfigureProjectLibBuilder(env):
|
||||
def _print_deps_tree(root, level=0):
|
||||
margin = "| " * (level)
|
||||
for lb in root.depbuilders:
|
||||
title = "<%s>" % lb.name
|
||||
title = lb.name
|
||||
pkg = PackageItem(lb.path)
|
||||
if pkg.metadata:
|
||||
title += " %s" % pkg.metadata.version
|
||||
title += " @ %s" % pkg.metadata.version
|
||||
elif lb.version:
|
||||
title += " %s" % lb.version
|
||||
title += " @ %s" % lb.version
|
||||
click.echo("%s|-- %s" % (margin, title), nl=False)
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo(
|
||||
" (License: %s, " % (_get_lib_license(pkg) or "Unknown"), nl=False
|
||||
)
|
||||
if pkg.metadata and pkg.metadata.spec.external:
|
||||
click.echo(" [%s]" % pkg.metadata.spec.url, nl=False)
|
||||
click.echo(" (", nl=False)
|
||||
click.echo(lb.path, nl=False)
|
||||
click.echo("URI: %s, " % pkg.metadata.spec.uri, nl=False)
|
||||
click.echo("Path: %s" % lb.path, nl=False)
|
||||
click.echo(")", nl=False)
|
||||
click.echo("")
|
||||
if lb.depbuilders:
|
||||
if lb.verbose and lb.depbuilders:
|
||||
_print_deps_tree(lb, level + 1)
|
||||
|
||||
project = ProjectAsLibBuilder(env, "$PROJECT_DIR")
|
||||
|
||||
if "test" in env["BUILD_TYPE"]:
|
||||
project.env.ConfigureTestTarget()
|
||||
|
||||
ldf_mode = LibBuilderBase.lib_ldf_mode.fget(project) # pylint: disable=no-member
|
||||
|
||||
click.echo("LDF: Library Dependency Finder -> http://bit.ly/configure-pio-ldf")
|
||||
click.echo("LDF: Library Dependency Finder -> https://bit.ly/configure-pio-ldf")
|
||||
click.echo(
|
||||
"LDF Modes: Finder ~ %s, Compatibility ~ %s"
|
||||
% (ldf_mode, project.lib_compat_mode)
|
||||
|
@@ -12,17 +12,31 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
import hashlib
import os
import re

from hashlib import md5
from os import makedirs
from os.path import isdir, isfile, join
from SCons.Platform import TempFileMunge # pylint: disable=import-error
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
from SCons.Subst import quote_spaces # pylint: disable=import-error

from platformio.compat import WINDOWS, hashlib_encode_data
from platformio.compat import IS_WINDOWS, hashlib_encode_data

# Windows CLI has limit with command length to 8192
# Leave 2000 chars for flags and other options
MAX_LINE_LENGTH = 6000 if WINDOWS else 128072
# There are the next limits depending on a platform:
# - Windows = 8191
# - Unix = 131072
# We need ~512 characters for compiler and temporary file paths
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512

WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")


def tempfile_arg_esc_func(arg):
arg = quote_spaces(arg)
if not IS_WINDOWS:
return arg
# GCC requires double Windows slashes, let's use UNIX separator
return WINPATHSEP_RE.sub(r"/\1", arg)


def long_sources_hook(env, sources):
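As a side note on the hunk above: the old build limited command lines to hard-coded 6000/128072 characters, while the new code starts from the real OS limits (8191 on Windows, 131072 on Unix) and reserves roughly 512 characters for the compiler and temporary file paths. A minimal standalone sketch of the same calculation and of the backslash-to-slash rewrite, with the platform check simplified to sys.platform (the real code uses platformio.compat.IS_WINDOWS and SCons' quote_spaces, which are omitted here):

import re
import sys

IS_WINDOWS = sys.platform.startswith("win")

# OS command-line limits minus a ~512-character reserve for tool and temp-file paths.
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512

# Same pattern as WINPATHSEP_RE above: a backslash followed by anything that is
# not a quote or another backslash (or the end of the string).
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")

def escape_arg(arg):
    # GCC accepts forward slashes on Windows, which avoids double-escaped paths.
    return WINPATHSEP_RE.sub(r"/\1", arg) if IS_WINDOWS else arg

print(MAX_LINE_LENGTH)                       # 7679 on Windows, 130560 elsewhere
print(escape_arg(r"C:\Users\dev\main.cpp"))  # C:/Users/dev/main.cpp on Windows
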
@@ -41,52 +55,42 @@ def long_sources_hook(env, sources):
return '@"%s"' % _file_long_data(env, " ".join(data))


def long_incflags_hook(env, incflags):
_incflags = env.subst(incflags).replace("\\", "/")
if len(_incflags) < MAX_LINE_LENGTH:
return incflags

# fix space in paths
data = []
for line in _incflags.split(" -I"):
line = line.strip()
if not line.startswith("-I"):
line = "-I" + line
data.append('-I"%s"' % line[2:])

return '@"%s"' % _file_long_data(env, " ".join(data))


def _file_long_data(env, data):
build_dir = env.subst("$BUILD_DIR")
if not isdir(build_dir):
makedirs(build_dir)
tmp_file = join(
build_dir, "longcmd-%s" % md5(hashlib_encode_data(data)).hexdigest()
if not os.path.isdir(build_dir):
os.makedirs(build_dir)
tmp_file = os.path.join(
build_dir, "longcmd-%s" % hashlib.md5(hashlib_encode_data(data)).hexdigest()
)
if isfile(tmp_file):
if os.path.isfile(tmp_file):
return tmp_file
with open(tmp_file, "w") as fp:
with open(tmp_file, mode="w", encoding="utf8") as fp:
fp.write(data)
return tmp_file


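_file_long_data() above is the response-file half of the trick: when the argument string would exceed MAX_LINE_LENGTH, it is written once to $BUILD_DIR/longcmd-<md5>.txt and the hook returns '@"<path>"' so the archiver or linker reads its arguments from that file. A reduced sketch of the same idea, assuming a plain directory path instead of the SCons environment and a plain UTF-8 encode in place of hashlib_encode_data:

import hashlib
import os

def write_long_command_file(build_dir, args):
    data = " ".join(args)
    os.makedirs(build_dir, exist_ok=True)
    # Key the cache file on the MD5 of the joined arguments, as above.
    tmp_file = os.path.join(
        build_dir, "longcmd-%s" % hashlib.md5(data.encode("utf8")).hexdigest()
    )
    if not os.path.isfile(tmp_file):
        with open(tmp_file, mode="w", encoding="utf8") as fp:
            fp.write(data)
    # Tools such as ar/gcc expand @file into the file's contents.
    return '@"%s"' % tmp_file

print(write_long_command_file("build_demo", ["obj/a.o", "obj/b.o", "obj/c.o"]))
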
def exists(_):
|
||||
return True
|
||||
def exists(env):
|
||||
return "compiledb" not in COMMAND_LINE_TARGETS and not env.IsIntegrationDump()
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.Replace(_long_sources_hook=long_sources_hook)
|
||||
env.Replace(_long_incflags_hook=long_incflags_hook)
|
||||
coms = {}
|
||||
for key in ("ARCOM", "LINKCOM"):
|
||||
coms[key] = env.get(key, "").replace(
|
||||
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
|
||||
)
|
||||
for key in ("_CCCOMCOM", "ASPPCOM"):
|
||||
coms[key] = env.get(key, "").replace(
|
||||
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}"
|
||||
)
|
||||
env.Replace(**coms)
|
||||
if not exists(env):
|
||||
return env
|
||||
kwargs = dict(
|
||||
_long_sources_hook=long_sources_hook,
|
||||
TEMPFILE=TempFileMunge,
|
||||
MAXLINELENGTH=MAX_LINE_LENGTH,
|
||||
TEMPFILEARGESCFUNC=tempfile_arg_esc_func,
|
||||
TEMPFILESUFFIX=".tmp",
|
||||
TEMPFILEDIR="$BUILD_DIR",
|
||||
)
|
||||
|
||||
for name in ("LINKCOM", "ASCOM", "ASPPCOM", "CCCOM", "CXXCOM"):
|
||||
kwargs[name] = "${TEMPFILE('%s','$%sSTR')}" % (env.get(name), name)
|
||||
|
||||
kwargs["ARCOM"] = env.get("ARCOM", "").replace(
|
||||
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
|
||||
)
|
||||
env.Replace(**kwargs)
|
||||
|
||||
return env
|
||||
|
@ -14,35 +14,33 @@
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
from os import environ, makedirs, remove
|
||||
from os.path import isdir, join, splitdrive
|
||||
import time
|
||||
|
||||
from elftools.elf.descriptions import describe_sh_flags
|
||||
from elftools.elf.elffile import ELFFile
|
||||
|
||||
from platformio.compat import dump_json_to_unicode
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.proc import exec_command
|
||||
from platformio.util import get_systype
|
||||
from platformio.project.memusage import save_report
|
||||
|
||||
|
||||
def _run_tool(cmd, env, tool_args):
|
||||
sysenv = environ.copy()
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
tmp_file = join(build_dir, "size-data-longcmd.txt")
|
||||
if not os.path.isdir(build_dir):
|
||||
os.makedirs(build_dir)
|
||||
tmp_file = os.path.join(build_dir, "size-data-longcmd.txt")
|
||||
|
||||
with open(tmp_file, "w") as fp:
|
||||
with open(tmp_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(tool_args))
|
||||
|
||||
cmd.append("@" + tmp_file)
|
||||
result = exec_command(cmd, env=sysenv)
|
||||
remove(tmp_file)
|
||||
os.remove(tmp_file)
|
||||
|
||||
return result
|
||||
|
||||
@ -55,7 +53,7 @@ def _get_symbol_locations(env, elf_path, addrs):
|
||||
locations = [line for line in result["out"].split("\n") if line]
|
||||
assert len(addrs) == len(locations)
|
||||
|
||||
return dict(zip(addrs, [l.strip() for l in locations]))
|
||||
return dict(zip(addrs, [loc.strip() for loc in locations]))
|
||||
|
||||
|
||||
def _get_demangled_names(env, mangled_names):
|
||||
@ -75,31 +73,7 @@ def _get_demangled_names(env, mangled_names):
|
||||
)
|
||||
|
||||
|
||||
def _determine_section(sections, symbol_addr):
|
||||
for section, info in sections.items():
|
||||
if not _is_flash_section(info) and not _is_ram_section(info):
|
||||
continue
|
||||
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
|
||||
return section
|
||||
return "unknown"
|
||||
|
||||
|
||||
def _is_ram_section(section):
|
||||
return (
|
||||
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
|
||||
and section.get("flags", "") == "WA"
|
||||
)
|
||||
|
||||
|
||||
def _is_flash_section(section):
|
||||
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")
|
||||
|
||||
|
||||
def _is_valid_symbol(symbol_name, symbol_type, symbol_address):
|
||||
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"
|
||||
|
||||
|
||||
def _collect_sections_info(elffile):
|
||||
def _collect_sections_info(env, elffile):
|
||||
sections = {}
|
||||
for section in elffile.iter_sections():
|
||||
if section.is_null() or section.name.startswith(".debug"):
|
||||
@ -109,13 +83,18 @@ def _collect_sections_info(elffile):
|
||||
section_flags = describe_sh_flags(section["sh_flags"])
|
||||
section_size = section.data_size
|
||||
|
||||
sections[section.name] = {
|
||||
section_data = {
|
||||
"name": section.name,
|
||||
"size": section_size,
|
||||
"start_addr": section["sh_addr"],
|
||||
"type": section_type,
|
||||
"flags": section_flags,
|
||||
}
|
||||
|
||||
sections[section.name] = section_data
|
||||
sections[section.name]["in_flash"] = env.memusageIsFlashSection(section_data)
|
||||
sections[section.name]["in_ram"] = env.memusageIsRamSection(section_data)
|
||||
|
||||
return sections
|
||||
|
||||
|
||||
@ -127,7 +106,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
sys.stderr.write("Couldn't find symbol table. Is ELF file stripped?")
|
||||
env.Exit(1)
|
||||
|
||||
sysenv = environ.copy()
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
|
||||
symbol_addrs = []
|
||||
@ -138,7 +117,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
symbol_size = s["st_size"]
|
||||
symbol_type = symbol_info["type"]
|
||||
|
||||
if not _is_valid_symbol(s.name, symbol_type, symbol_addr):
|
||||
if not env.memusageIsValidSymbol(s.name, symbol_type, symbol_addr):
|
||||
continue
|
||||
|
||||
symbol = {
|
||||
@ -147,7 +126,7 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
"name": s.name,
|
||||
"type": symbol_type,
|
||||
"size": symbol_size,
|
||||
"section": _determine_section(sections, symbol_addr),
|
||||
"section": env.memusageDetermineSection(sections, symbol_addr),
|
||||
}
|
||||
|
||||
if s.name.startswith("_Z"):
|
||||
@ -164,9 +143,9 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
location = symbol_locations.get(hex(symbol["addr"]))
|
||||
if not location or "?" in location:
|
||||
continue
|
||||
if "windows" in get_systype():
|
||||
drive, tail = splitdrive(location)
|
||||
location = join(drive.upper(), tail)
|
||||
if IS_WINDOWS:
|
||||
drive, tail = os.path.splitdrive(location)
|
||||
location = os.path.join(drive.upper(), tail)
|
||||
symbol["file"] = location
|
||||
symbol["line"] = 0
|
||||
if ":" in location:
|
||||
@@ -177,31 +156,57 @@ def _collect_symbols_info(env, elffile, elf_path, sections):
return symbols


def _calculate_firmware_size(sections):
def memusageDetermineSection(_, sections, symbol_addr):
for section, info in sections.items():
if not info.get("in_flash", False) and not info.get("in_ram", False):
continue
if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
return section
return "unknown"


def memusageIsValidSymbol(_, symbol_name, symbol_type, symbol_address):
return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"


def memusageIsRamSection(_, section):
return (
section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
and section.get("flags", "") == "WA"
)


def memusageIsFlashSection(_, section):
return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")


def memusageCalculateFirmwareSize(_, sections):
flash_size = ram_size = 0
for section_info in sections.values():
if _is_flash_section(section_info):
if section_info.get("in_flash", False):
flash_size += section_info.get("size", 0)
if _is_ram_section(section_info):
if section_info.get("in_ram", False):
ram_size += section_info.get("size", 0)

return ram_size, flash_size


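To make the section predicates above concrete: RAM usage is counted from allocatable, writable sections (SHT_NOBITS/SHT_PROGBITS with flags "WA", i.e. .bss/.data style), while flash usage is counted from allocatable SHT_PROGBITS sections (.text, .rodata, and .data again, since initialized data lives in both). A standalone sketch over a hand-made sections dict of the shape built by _collect_sections_info (the names and sizes are invented):

def is_ram_section(section):
    return (
        section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
        and section.get("flags", "") == "WA"
    )

def is_flash_section(section):
    return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")

sections = {
    ".text": {"type": "SHT_PROGBITS", "flags": "AX", "size": 5120, "start_addr": 0x08000000},
    ".data": {"type": "SHT_PROGBITS", "flags": "WA", "size": 256, "start_addr": 0x20000000},
    ".bss": {"type": "SHT_NOBITS", "flags": "WA", "size": 1024, "start_addr": 0x20000100},
}

ram = sum(s["size"] for s in sections.values() if is_ram_section(s))
flash = sum(s["size"] for s in sections.values() if is_flash_section(s))
print(ram, flash)  # 1280 5376 -- .data is counted against both RAM and flash
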
def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
|
||||
data = {"device": {}, "memory": {}, "version": 1}
|
||||
def DumpMemoryUsage(_, target, source, env): # pylint: disable=unused-argument
|
||||
result = {"version": 1, "timestamp": int(time.time()), "device": {}, "memory": {}}
|
||||
|
||||
board = env.BoardConfig()
|
||||
if board:
|
||||
data["device"] = {
|
||||
result["device"] = {
|
||||
"mcu": board.get("build.mcu", ""),
|
||||
"cpu": board.get("build.cpu", ""),
|
||||
"frequency": board.get("build.f_cpu"),
|
||||
"flash": int(board.get("upload.maximum_size", 0)),
|
||||
"ram": int(board.get("upload.maximum_ram_size", 0)),
|
||||
}
|
||||
if data["device"]["frequency"] and data["device"]["frequency"].endswith("L"):
|
||||
data["device"]["frequency"] = int(data["device"]["frequency"][0:-1])
|
||||
if result["device"]["frequency"] and result["device"]["frequency"].endswith(
|
||||
"L"
|
||||
):
|
||||
result["device"]["frequency"] = int(result["device"]["frequency"][0:-1])
|
||||
|
||||
elf_path = env.subst("$PIOMAINPROG")
|
||||
|
||||
@ -212,37 +217,41 @@ def DumpSizeData(_, target, source, env): # pylint: disable=unused-argument
|
||||
sys.stderr.write("Elf file doesn't contain DWARF information")
|
||||
env.Exit(1)
|
||||
|
||||
sections = _collect_sections_info(elffile)
|
||||
firmware_ram, firmware_flash = _calculate_firmware_size(sections)
|
||||
data["memory"]["total"] = {
|
||||
sections = _collect_sections_info(env, elffile)
|
||||
firmware_ram, firmware_flash = env.memusageCalculateFirmwareSize(sections)
|
||||
result["memory"]["total"] = {
|
||||
"ram_size": firmware_ram,
|
||||
"flash_size": firmware_flash,
|
||||
"sections": sections,
|
||||
}
|
||||
result["memory"]["sections"] = sections
|
||||
|
||||
files = dict()
|
||||
files = {}
|
||||
for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
|
||||
file_path = symbol.get("file") or "unknown"
|
||||
file_path = symbol.pop("file", "unknown")
|
||||
if not files.get(file_path, {}):
|
||||
files[file_path] = {"symbols": [], "ram_size": 0, "flash_size": 0}
|
||||
|
||||
symbol_size = symbol.get("size", 0)
|
||||
section = sections.get(symbol.get("section", ""), {})
|
||||
if _is_ram_section(section):
|
||||
if not section:
|
||||
continue
|
||||
if section.get("in_ram", False):
|
||||
files[file_path]["ram_size"] += symbol_size
|
||||
if _is_flash_section(section):
|
||||
if section.get("in_flash", False):
|
||||
files[file_path]["flash_size"] += symbol_size
|
||||
|
||||
files[file_path]["symbols"].append(symbol)
|
||||
|
||||
data["memory"]["files"] = list()
|
||||
result["memory"]["files"] = []
|
||||
for k, v in files.items():
|
||||
file_data = {"path": k}
|
||||
file_data.update(v)
|
||||
data["memory"]["files"].append(file_data)
|
||||
result["memory"]["files"].append(file_data)
|
||||
|
||||
with open(join(env.subst("$BUILD_DIR"), "sizedata.json"), "w") as fp:
|
||||
fp.write(dump_json_to_unicode(data))
|
||||
print(
|
||||
"Memory usage report has been saved to the following location: "
|
||||
f"\"{save_report(os.getcwd(), env['PIOENV'], result)}\""
|
||||
)
|
||||
|
||||
|
||||
def exists(_):
|
||||
@ -250,5 +259,10 @@ def exists(_):
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(DumpSizeData)
|
||||
env.AddMethod(memusageIsRamSection)
|
||||
env.AddMethod(memusageIsFlashSection)
|
||||
env.AddMethod(memusageCalculateFirmwareSize)
|
||||
env.AddMethod(memusageDetermineSection)
|
||||
env.AddMethod(memusageIsValidSymbol)
|
||||
env.AddMethod(DumpMemoryUsage)
|
||||
return env
|
@ -12,245 +12,15 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import atexit
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from tempfile import mkstemp
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.compat import get_filesystem_encoding, get_locale_encoding, glob_escape
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.proc import exec_command
|
||||
|
||||
|
||||
class InoToCPPConverter(object):
|
||||
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
([a-z_\d\&]+\*?\s+){1,2} # return type
|
||||
([a-z_\d]+\s*) # name of prototype
|
||||
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
|
||||
)\s*(\{|;) # must end with `{` or `;`
|
||||
""",
|
||||
re.X | re.M | re.I,
|
||||
)
|
||||
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
|
||||
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
|
||||
|
||||
def __init__(self, env):
|
||||
self.env = env
|
||||
self._main_ino = None
|
||||
self._safe_encoding = None
|
||||
|
||||
def read_safe_contents(self, path):
|
||||
error_reported = False
|
||||
for encoding in (
|
||||
"utf-8",
|
||||
None,
|
||||
get_filesystem_encoding(),
|
||||
get_locale_encoding(),
|
||||
"latin-1",
|
||||
):
|
||||
try:
|
||||
with io.open(path, encoding=encoding) as fp:
|
||||
contents = fp.read()
|
||||
self._safe_encoding = encoding
|
||||
return contents
|
||||
except UnicodeDecodeError:
|
||||
if not error_reported:
|
||||
error_reported = True
|
||||
click.secho(
|
||||
"Unicode decode error has occurred, please remove invalid "
|
||||
"(non-ASCII or non-UTF8) characters from %s file or convert it to UTF-8"
|
||||
% path,
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
return ""
|
||||
|
||||
def write_safe_contents(self, path, contents):
|
||||
with io.open(
|
||||
path, "w", encoding=self._safe_encoding, errors="backslashreplace"
|
||||
) as fp:
|
||||
return fp.write(contents)
|
||||
|
||||
def is_main_node(self, contents):
|
||||
return self.DETECTMAIN_RE.search(contents)
|
||||
|
||||
def convert(self, nodes):
|
||||
contents = self.merge(nodes)
|
||||
if not contents:
|
||||
return None
|
||||
return self.process(contents)
|
||||
|
||||
def merge(self, nodes):
|
||||
assert nodes
|
||||
lines = []
|
||||
for node in nodes:
|
||||
contents = self.read_safe_contents(node.get_path())
|
||||
_lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
|
||||
if self.is_main_node(contents):
|
||||
lines = _lines + lines
|
||||
self._main_ino = node.get_path()
|
||||
else:
|
||||
lines.extend(_lines)
|
||||
|
||||
if not self._main_ino:
|
||||
self._main_ino = nodes[0].get_path()
|
||||
|
||||
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
|
||||
|
||||
def process(self, contents):
|
||||
out_file = self._main_ino + ".cpp"
|
||||
assert self._gcc_preprocess(contents, out_file)
|
||||
contents = self.read_safe_contents(out_file)
|
||||
contents = self._join_multiline_strings(contents)
|
||||
self.write_safe_contents(out_file, self.append_prototypes(contents))
|
||||
return out_file
|
||||
|
||||
def _gcc_preprocess(self, contents, out_file):
|
||||
tmp_path = mkstemp()[1]
|
||||
self.write_safe_contents(tmp_path, contents)
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
|
||||
out_file, tmp_path
|
||||
),
|
||||
"Converting " + os.path.basename(out_file[:-4]),
|
||||
)
|
||||
)
|
||||
atexit.register(_delete_file, tmp_path)
|
||||
return os.path.isfile(out_file)
|
||||
|
||||
def _join_multiline_strings(self, contents):
|
||||
if "\\\n" not in contents:
|
||||
return contents
|
||||
newlines = []
|
||||
linenum = 0
|
||||
stropen = False
|
||||
for line in contents.split("\n"):
|
||||
_linenum = self._parse_preproc_line_num(line)
|
||||
if _linenum is not None:
|
||||
linenum = _linenum
|
||||
else:
|
||||
linenum += 1
|
||||
|
||||
if line.endswith("\\"):
|
||||
if line.startswith('"'):
|
||||
stropen = True
|
||||
newlines.append(line[:-1])
|
||||
continue
|
||||
if stropen:
|
||||
newlines[len(newlines) - 1] += line[:-1]
|
||||
continue
|
||||
elif stropen and line.endswith(('",', '";')):
|
||||
newlines[len(newlines) - 1] += line
|
||||
stropen = False
|
||||
newlines.append(
|
||||
'#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/"))
|
||||
)
|
||||
continue
|
||||
|
||||
newlines.append(line)
|
||||
|
||||
return "\n".join(newlines)
|
||||
|
||||
@staticmethod
def _parse_preproc_line_num(line):
if not line.startswith("#"):
return None
tokens = line.split(" ", 3)
if len(tokens) > 2 and tokens[1].isdigit():
return int(tokens[1])
return None

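_parse_preproc_line_num() above reads GCC preprocessor linemarkers of the form # <line> "<file>" <flags> so that the generated .cpp can be mapped back to the original .ino line. A standalone version with a couple of sample inputs:

def parse_preproc_line_num(line):
    # GCC emits linemarkers such as: # 42 "src/main.ino" 1
    if not line.startswith("#"):
        return None
    tokens = line.split(" ", 3)
    if len(tokens) > 2 and tokens[1].isdigit():
        return int(tokens[1])
    return None

print(parse_preproc_line_num('# 42 "src/main.ino" 1'))  # 42
print(parse_preproc_line_num("int x = 1;"))             # None
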
def _parse_prototypes(self, contents):
|
||||
prototypes = []
|
||||
reserved_keywords = set(["if", "else", "while"])
|
||||
for match in self.PROTOTYPE_RE.finditer(contents):
|
||||
if (
|
||||
set([match.group(2).strip(), match.group(3).strip()])
|
||||
& reserved_keywords
|
||||
):
|
||||
continue
|
||||
prototypes.append(match)
|
||||
return prototypes
|
||||
|
||||
def _get_total_lines(self, contents):
|
||||
total = 0
|
||||
if contents.endswith("\n"):
|
||||
contents = contents[:-1]
|
||||
for line in contents.split("\n")[::-1]:
|
||||
linenum = self._parse_preproc_line_num(line)
|
||||
if linenum is not None:
|
||||
return total + linenum
|
||||
total += 1
|
||||
return total
|
||||
|
||||
def append_prototypes(self, contents):
prototypes = self._parse_prototypes(contents) or []

# skip already declared prototypes
declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";")
prototypes = [m for m in prototypes if m.group(1).strip() not in declared]

if not prototypes:
return contents

prototype_names = set(m.group(3).strip() for m in prototypes)
split_pos = prototypes[0].start()
match_ptrs = re.search(
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
contents[:split_pos],
re.M,
)
if match_ptrs:
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1

result = []
result.append(contents[:split_pos].strip())
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
result.append(
'#line %d "%s"'
% (
self._get_total_lines(contents[:split_pos]),
self._main_ino.replace("\\", "/"),
)
)
result.append(contents[split_pos:].strip())
return "\n".join(result)


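append_prototypes() above is the core of the .ino-to-.cpp conversion: PROTOTYPE_RE (defined near the top of this class) finds every function definition, already-declared prototypes are skipped, and the remaining declarations are injected ahead of the first definition together with a #line directive so diagnostics still point at the original sketch. A reduced standalone illustration using the same regular expression on an invented two-function sketch (the reserved-keyword filter and the #line bookkeeping are omitted):

import re

# Same pattern as InoToCPPConverter.PROTOTYPE_RE earlier in this file.
PROTOTYPE_RE = re.compile(
    r"""^(
    (?:template\<.*\>\s*)?      # template
    ([a-z_\d\&]+\*?\s+){1,2}    # return type
    ([a-z_\d]+\s*)              # name of prototype
    \([a-z_,\.\*\&\[\]\s\d]*\)  # arguments
    )\s*(\{|;)                  # must end with `{` or `;`
    """,
    re.X | re.M | re.I,
)

source = """void setup() {
  blink(3);
}

void blink(int times) {
}
"""

definitions = [m for m in PROTOTYPE_RE.finditer(source) if m.group(4) == "{"]
prototypes = "%s;" % ";\n".join(m.group(1).strip() for m in definitions)
print(prototypes + "\n" + source)
# void setup();
# void blink(int times);
# void setup() { ...
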
def ConvertInoToCpp(env):
|
||||
src_dir = glob_escape(env.subst("$PROJECT_SRC_DIR"))
|
||||
ino_nodes = env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
|
||||
os.path.join(src_dir, "*.pde")
|
||||
)
|
||||
if not ino_nodes:
|
||||
return
|
||||
c = InoToCPPConverter(env)
|
||||
out_file = c.convert(ino_nodes)
|
||||
|
||||
atexit.register(_delete_file, out_file)
|
||||
|
||||
|
||||
def _delete_file(path):
|
||||
try:
|
||||
if os.path.isfile(path):
|
||||
os.remove(path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def _get_compiler_type(env):
|
||||
def GetCompilerType(env):
|
||||
if env.subst("$CC").endswith("-gcc"):
|
||||
return "gcc"
|
||||
try:
|
||||
@ -269,10 +39,6 @@ def _get_compiler_type(env):
|
||||
return None
|
||||
|
||||
|
||||
def GetCompilerType(env):
|
||||
return _get_compiler_type(env)
|
||||
|
||||
|
||||
def GetActualLDScript(env):
|
||||
def _lookup_in_ldpath(script):
|
||||
for d in env.get("LIBPATH", []):
|
||||
@ -318,7 +84,7 @@ def GetActualLDScript(env):
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def ConfigureDebugFlags(env):
|
||||
def ConfigureDebugTarget(env):
|
||||
def _cleanup_debug_flags(scope):
|
||||
if scope not in env:
|
||||
return
|
||||
@ -333,30 +99,28 @@ def ConfigureDebugFlags(env):
|
||||
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
|
||||
_cleanup_debug_flags(scope)
|
||||
|
||||
debug_flags = env.ParseFlags(env.GetProjectOption("debug_build_flags"))
|
||||
debug_flags = env.ParseFlags(
|
||||
env.get("PIODEBUGFLAGS")
|
||||
if env.get("PIODEBUGFLAGS")
|
||||
and not env.GetProjectOptions(as_dict=True).get("debug_build_flags")
|
||||
else env.GetProjectOption("debug_build_flags")
|
||||
)
|
||||
|
||||
env.MergeFlags(debug_flags)
|
||||
optimization_flags = [
|
||||
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))
|
||||
]
|
||||
|
||||
if optimization_flags:
|
||||
env.AppendUnique(ASFLAGS=optimization_flags, LINKFLAGS=optimization_flags)
|
||||
|
||||
|
||||
def ConfigureTestTarget(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
|
||||
CPPPATH=[os.path.join("$BUILD_DIR", "UnityTestLib")],
|
||||
)
|
||||
unitylib = env.BuildLibrary(
|
||||
os.path.join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity")
|
||||
)
|
||||
env.Prepend(LIBS=[unitylib])
|
||||
|
||||
src_filter = ["+<*.cpp>", "+<*.c>"]
|
||||
if "PIOTEST_RUNNING_NAME" in env:
|
||||
src_filter.append("+<%s%s>" % (env["PIOTEST_RUNNING_NAME"], os.path.sep))
|
||||
env.Replace(PIOTEST_SRC_FILTER=src_filter)
|
||||
env.AppendUnique(
|
||||
ASFLAGS=[
|
||||
# skip -O flags for assembler
|
||||
f
|
||||
for f in optimization_flags
|
||||
if f.startswith("-g")
|
||||
],
|
||||
LINKFLAGS=optimization_flags,
|
||||
)
|
||||
|
||||
|
||||
def GetExtraScripts(env, scope):
|
||||
@ -369,18 +133,17 @@ def GetExtraScripts(env, scope):
|
||||
if not items:
|
||||
return items
|
||||
with fs.cd(env.subst("$PROJECT_DIR")):
|
||||
return [os.path.realpath(item) for item in items]
|
||||
return [os.path.abspath(env.subst(item)) for item in items]
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetCompilerType)
|
||||
env.AddMethod(GetActualLDScript)
|
||||
env.AddMethod(ConfigureDebugTarget)
|
||||
env.AddMethod(GetExtraScripts)
|
||||
# backward-compatibility with Zephyr build script
|
||||
env.AddMethod(ConfigureDebugTarget, "ConfigureDebugFlags")
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(ConvertInoToCpp)
|
||||
env.AddMethod(GetCompilerType)
|
||||
env.AddMethod(GetActualLDScript)
|
||||
env.AddMethod(ConfigureDebugFlags)
|
||||
env.AddMethod(ConfigureTestTarget)
|
||||
env.AddMethod(GetExtraScripts)
|
||||
return env
|
||||
|
@ -12,16 +12,15 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
from platformio.package.meta import PackageItem
|
||||
from platformio.package.version import get_original_version
|
||||
from platformio.platform.exception import UnknownBoard
|
||||
@ -32,14 +31,13 @@ from platformio.project.config import ProjectOptions
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def PioPlatform(env):
|
||||
variables = env.GetProjectOptions(as_dict=True)
|
||||
if "framework" in variables:
|
||||
# support PIO Core 3.0 dev/platforms
|
||||
variables["pioframework"] = variables["framework"]
|
||||
p = PlatformFactory.new(os.path.dirname(env["PLATFORM_MANIFEST"]))
|
||||
p.configure_default_packages(variables, COMMAND_LINE_TARGETS)
|
||||
return p
|
||||
def _PioPlatform():
|
||||
env = DefaultEnvironment()
|
||||
return PlatformFactory.from_env(env["PIOENV"], targets=COMMAND_LINE_TARGETS)
|
||||
|
||||
|
||||
def PioPlatform(_):
|
||||
return _PioPlatform()
|
||||
|
||||
|
||||
def BoardConfig(env, board=None):
|
||||
@ -49,9 +47,10 @@ def BoardConfig(env, board=None):
|
||||
board = board or env.get("BOARD")
|
||||
assert board, "BoardConfig: Board is not defined"
|
||||
return p.board_config(board)
|
||||
except (AssertionError, UnknownBoard) as e:
|
||||
sys.stderr.write("Error: %s\n" % str(e))
|
||||
except (AssertionError, UnknownBoard) as exc:
|
||||
sys.stderr.write("Error: %s\n" % str(exc))
|
||||
env.Exit(1)
|
||||
return None
|
||||
|
||||
|
||||
def GetFrameworkScript(env, framework):
|
||||
@ -70,24 +69,25 @@ def LoadPioPlatform(env):
|
||||
env["PIOPLATFORM"] = p.name
|
||||
|
||||
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
|
||||
systype = util.get_systype()
|
||||
for pkg in p.get_installed_packages():
|
||||
type_ = p.get_package_type(pkg.metadata.name)
|
||||
if type_ not in ("toolchain", "uploader", "debugger"):
|
||||
continue
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path,
|
||||
(
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path
|
||||
),
|
||||
)
|
||||
if (
|
||||
not WINDOWS
|
||||
not IS_WINDOWS
|
||||
and os.path.isdir(os.path.join(pkg.path, "lib"))
|
||||
and type_ != "toolchain"
|
||||
):
|
||||
env.PrependENVPath(
|
||||
"DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH",
|
||||
"DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH",
|
||||
os.path.join(pkg.path, "lib"),
|
||||
)
|
||||
|
||||
@ -160,7 +160,7 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
|
||||
and pkg_metadata
|
||||
and pkg_metadata.spec.external
|
||||
):
|
||||
data.append("(%s)" % pkg_metadata.spec.url)
|
||||
data.append("(%s)" % pkg_metadata.spec.uri)
|
||||
if board_config:
|
||||
data.extend([">", board_config.get("name")])
|
||||
return data
|
||||
@ -213,7 +213,7 @@ def PrintConfiguration(env): # pylint: disable=too-many-statements
|
||||
data = []
|
||||
for item in platform.dump_used_packages():
|
||||
original_version = get_original_version(item["version"])
|
||||
info = "%s %s" % (item["name"], item["version"])
|
||||
info = "%s @ %s" % (item["name"], item["version"])
|
||||
extra = []
|
||||
if original_version:
|
||||
extra.append(original_version)
|
||||
|
@ -12,9 +12,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from platformio.project.config import MISSING, ProjectConfig, ProjectOptions
|
||||
from platformio.compat import MISSING
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
|
||||
def GetProjectConfig(env):
|
||||
@ -30,15 +29,17 @@ def GetProjectOption(env, option, default=MISSING):
|
||||
|
||||
|
||||
def LoadProjectOptions(env):
|
||||
for option, value in env.GetProjectOptions():
|
||||
option_meta = ProjectOptions.get("env." + option)
|
||||
config = env.GetProjectConfig()
|
||||
section = "env:" + env["PIOENV"]
|
||||
for option in config.options(section):
|
||||
option_meta = config.find_option_meta(section, option)
|
||||
if (
|
||||
not option_meta
|
||||
or not option_meta.buildenvvar
|
||||
or option_meta.buildenvvar in env
|
||||
):
|
||||
continue
|
||||
env[option_meta.buildenvvar] = value
|
||||
env[option_meta.buildenvvar] = config.get(section, option)
|
||||
|
||||
|
||||
def exists(_):
|
||||
|
@ -12,8 +12,6 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
|
||||
from SCons.Action import Action # pylint: disable=import-error
|
||||
@ -29,9 +27,13 @@ def VerboseAction(_, act, actstr):
|
||||
return Action(act, actstr)
|
||||
|
||||
|
||||
def PioClean(env, clean_dir):
|
||||
def IsCleanTarget(env):
|
||||
return env.GetOption("clean")
|
||||
|
||||
|
||||
def CleanProject(env, fullclean=False):
|
||||
def _relpath(path):
|
||||
if compat.WINDOWS:
|
||||
if compat.IS_WINDOWS:
|
||||
prefix = os.getcwd()[:2].lower()
|
||||
if (
|
||||
":" not in prefix
|
||||
@ -41,21 +43,22 @@ def PioClean(env, clean_dir):
|
||||
return path
|
||||
return os.path.relpath(path)
|
||||
|
||||
if not os.path.isdir(clean_dir):
|
||||
def _clean_dir(path):
|
||||
clean_rel_path = _relpath(path)
|
||||
print(f"Removing {clean_rel_path}")
|
||||
fs.rmtree(path)
|
||||
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
libdeps_dir = env.subst(os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"))
|
||||
if os.path.isdir(build_dir):
|
||||
_clean_dir(build_dir)
|
||||
else:
|
||||
print("Build environment is clean")
|
||||
env.Exit(0)
|
||||
clean_rel_path = _relpath(clean_dir)
|
||||
for root, _, files in os.walk(clean_dir):
|
||||
for f in files:
|
||||
dst = os.path.join(root, f)
|
||||
os.remove(dst)
|
||||
print(
|
||||
"Removed %s"
|
||||
% (dst if not clean_rel_path.startswith(".") else _relpath(dst))
|
||||
)
|
||||
|
||||
if fullclean and os.path.isdir(libdeps_dir):
|
||||
_clean_dir(libdeps_dir)
|
||||
|
||||
print("Done cleaning")
|
||||
fs.rmtree(clean_dir)
|
||||
env.Exit(0)
|
||||
|
||||
|
||||
def AddTarget( # pylint: disable=too-many-arguments
|
||||
@ -65,7 +68,7 @@ def AddTarget( # pylint: disable=too-many-arguments
|
||||
actions,
|
||||
title=None,
|
||||
description=None,
|
||||
group="Generic",
|
||||
group="General",
|
||||
always_build=True,
|
||||
):
|
||||
if "__PIO_TARGETS" not in env:
|
||||
@ -95,13 +98,6 @@ def DumpTargets(env):
|
||||
t["group"] == "Platform" for t in targets.values()
|
||||
):
|
||||
targets["upload"] = dict(name="upload", group="Platform", title="Upload")
|
||||
targets["compiledb"] = dict(
|
||||
name="compiledb",
|
||||
title="Compilation Database",
|
||||
description="Generate compilation database `compile_commands.json`",
|
||||
group="Advanced",
|
||||
)
|
||||
targets["clean"] = dict(name="clean", title="Clean", group="Generic")
|
||||
return list(targets.values())
|
||||
|
||||
|
||||
@ -111,7 +107,8 @@ def exists(_):
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(VerboseAction)
|
||||
env.AddMethod(PioClean)
|
||||
env.AddMethod(IsCleanTarget)
|
||||
env.AddMethod(CleanProject)
|
||||
env.AddMethod(AddTarget)
|
||||
env.AddMethod(AddPlatformTarget)
|
||||
env.AddMethod(AddCustomTarget)
|
||||
|
platformio/builder/tools/piotest.py (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
from platformio.builder.tools import piobuild
|
||||
from platformio.test.result import TestSuite
|
||||
from platformio.test.runners.factory import TestRunnerFactory
|
||||
|
||||
|
||||
def ConfigureTestTarget(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST"], # deprecated, use PIO_UNIT_TESTING
|
||||
PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piobuild.SRC_BUILD_EXT],
|
||||
)
|
||||
env.Prepend(CPPPATH=["$PROJECT_TEST_DIR"])
|
||||
|
||||
if "PIOTEST_RUNNING_NAME" in env:
|
||||
test_name = env["PIOTEST_RUNNING_NAME"]
|
||||
while True:
|
||||
test_name = os.path.dirname(test_name) # parent dir
|
||||
# skip nested tests (user's side issue?)
|
||||
if not test_name or os.path.basename(test_name).startswith("test_"):
|
||||
break
|
||||
env.Prepend(
|
||||
PIOTEST_SRC_FILTER=[
|
||||
f"+<{test_name}{os.path.sep}*.{ext}>"
|
||||
for ext in piobuild.SRC_BUILD_EXT
|
||||
],
|
||||
CPPPATH=[os.path.join("$PROJECT_TEST_DIR", test_name)],
|
||||
)
|
||||
|
||||
env.Prepend(
|
||||
PIOTEST_SRC_FILTER=[f"+<$PIOTEST_RUNNING_NAME{os.path.sep}>"],
|
||||
CPPPATH=[os.path.join("$PROJECT_TEST_DIR", "$PIOTEST_RUNNING_NAME")],
|
||||
)
|
||||
|
||||
test_runner = TestRunnerFactory.new(
|
||||
TestSuite(env["PIOENV"], env.get("PIOTEST_RUNNING_NAME", "*")),
|
||||
env.GetProjectConfig(),
|
||||
)
|
||||
test_runner.configure_build_env(env)
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(ConfigureTestTarget)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
@ -12,25 +12,22 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from os import environ
|
||||
from os.path import isfile, join
|
||||
from shutil import copyfile
|
||||
from time import sleep
|
||||
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import exception, fs, util
|
||||
from platformio.compat import WINDOWS
|
||||
from platformio import exception, fs
|
||||
from platformio.device.finder import SerialPortFinder, find_mbed_disk, is_pattern_port
|
||||
from platformio.device.list.util import list_serial_ports
|
||||
from platformio.proc import exec_command
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
|
||||
def FlushSerialBuffer(env, port):
|
||||
s = Serial(env.subst(port))
|
||||
@ -62,7 +59,7 @@ def WaitForNewSerialPort(env, before):
|
||||
elapsed = 0
|
||||
before = [p["port"] for p in before]
|
||||
while elapsed < 5 and new_port is None:
|
||||
now = [p["port"] for p in util.get_serial_ports()]
|
||||
now = [p["port"] for p in list_serial_ports()]
|
||||
for p in now:
|
||||
if p not in before:
|
||||
new_port = p
|
||||
@ -97,67 +94,29 @@ def WaitForNewSerialPort(env, before):
|
||||
|
||||
def AutodetectUploadPort(*args, **kwargs):
|
||||
env = args[0]
|
||||
|
||||
def _get_pattern():
|
||||
if "UPLOAD_PORT" not in env:
|
||||
return None
|
||||
if set(["*", "?", "[", "]"]) & set(env["UPLOAD_PORT"]):
|
||||
return env["UPLOAD_PORT"]
|
||||
return None
|
||||
|
||||
def _is_match_pattern(port):
|
||||
pattern = _get_pattern()
|
||||
if not pattern:
|
||||
return True
|
||||
return fnmatch(port, pattern)
|
||||
|
||||
def _look_for_mbed_disk():
|
||||
msdlabels = ("mbed", "nucleo", "frdm", "microbit")
|
||||
for item in util.get_logical_devices():
|
||||
if item["path"].startswith("/net") or not _is_match_pattern(item["path"]):
|
||||
continue
|
||||
mbed_pages = [join(item["path"], n) for n in ("mbed.htm", "mbed.html")]
|
||||
if any(isfile(p) for p in mbed_pages):
|
||||
return item["path"]
|
||||
if item["name"] and any(l in item["name"].lower() for l in msdlabels):
|
||||
return item["path"]
|
||||
return None
|
||||
|
||||
def _look_for_serial_port():
|
||||
port = None
|
||||
board_hwids = []
|
||||
upload_protocol = env.subst("$UPLOAD_PROTOCOL")
|
||||
if "BOARD" in env and "build.hwids" in env.BoardConfig():
|
||||
board_hwids = env.BoardConfig().get("build.hwids")
|
||||
for item in util.get_serial_ports(filter_hwid=True):
|
||||
if not _is_match_pattern(item["port"]):
|
||||
continue
|
||||
port = item["port"]
|
||||
if upload_protocol.startswith("blackmagic"):
|
||||
if WINDOWS and port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item["description"]:
|
||||
return port
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item["hwid"]:
|
||||
return port
|
||||
return port
|
||||
|
||||
if "UPLOAD_PORT" in env and not _get_pattern():
|
||||
print(env.subst("Use manually specified: $UPLOAD_PORT"))
|
||||
initial_port = env.subst("$UPLOAD_PORT")
|
||||
upload_protocol = env.subst("$UPLOAD_PROTOCOL")
|
||||
if initial_port and not is_pattern_port(initial_port):
|
||||
print(env.subst("Using manually specified: $UPLOAD_PORT"))
|
||||
return
|
||||
|
||||
if env.subst("$UPLOAD_PROTOCOL") == "mbed" or (
|
||||
"mbed" in env.subst("$PIOFRAMEWORK") and not env.subst("$UPLOAD_PROTOCOL")
|
||||
if upload_protocol == "mbed" or (
|
||||
"mbed" in env.subst("$PIOFRAMEWORK") and not upload_protocol
|
||||
):
|
||||
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
|
||||
env.Replace(UPLOAD_PORT=find_mbed_disk(initial_port))
|
||||
else:
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
sys.stderr.write("\n%s\n\n" % e)
|
||||
env.Replace(UPLOAD_PORT=_look_for_serial_port())
|
||||
except exception.InvalidUdevRules as exc:
|
||||
sys.stderr.write("\n%s\n\n" % exc)
|
||||
env.Replace(
|
||||
UPLOAD_PORT=SerialPortFinder(
|
||||
board_config=env.BoardConfig() if "BOARD" in env else None,
|
||||
upload_protocol=upload_protocol,
|
||||
prefer_gdb_port="blackmagic" in upload_protocol,
|
||||
verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)),
|
||||
).find(initial_port)
|
||||
)
|
||||
|
||||
if env.subst("$UPLOAD_PORT"):
|
||||
print(env.subst("Auto-detected: $UPLOAD_PORT"))
|
||||
@ -175,10 +134,12 @@ def UploadToDisk(_, target, source, env):
|
||||
assert "UPLOAD_PORT" in env
|
||||
progname = env.subst("$PROGNAME")
|
||||
for ext in ("bin", "hex"):
|
||||
fpath = join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
|
||||
if not isfile(fpath):
|
||||
fpath = os.path.join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
|
||||
if not os.path.isfile(fpath):
|
||||
continue
|
||||
copyfile(fpath, join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
|
||||
copyfile(
|
||||
fpath, os.path.join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext))
|
||||
)
|
||||
print(
|
||||
"Firmware has been successfully uploaded.\n"
|
||||
"(Some boards may require manual hard reset)"
|
||||
@ -211,7 +172,7 @@ def CheckUploadSize(_, target, source, env):
|
||||
if not isinstance(cmd, list):
|
||||
cmd = cmd.split()
|
||||
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
|
||||
sysenv = environ.copy()
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command(env.subst(cmd), env=sysenv)
|
||||
if result["returncode"] != 0:
|
||||
@@ -236,9 +197,9 @@ def CheckUploadSize(_, target, source, env):
def _format_availale_bytes(value, total):
percent_raw = float(value) / float(total)
blocks_per_progress = 10
used_blocks = int(round(blocks_per_progress * percent_raw))
if used_blocks > blocks_per_progress:
used_blocks = blocks_per_progress
used_blocks = min(
int(round(blocks_per_progress * percent_raw)), blocks_per_progress
)
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
"=" * used_blocks, blocks_per_progress, percent_raw, value, total
)
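The change above only folds the manual clamp into min(); the progress-bar rendering is otherwise untouched. For reference, a standalone version of the formatter with invented numbers (28672 bytes used out of 81920):

def format_available_bytes(value, total):
    percent_raw = float(value) / float(total)
    blocks_per_progress = 10
    # Clamp to the bar width in a single expression, as in the new code above.
    used_blocks = min(int(round(blocks_per_progress * percent_raw)), blocks_per_progress)
    return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
        "=" * used_blocks, blocks_per_progress, percent_raw, value, total
    )

print(format_available_bytes(28672, 81920))
# [====      ]  35.0% (used 28672 bytes from 81920 bytes)
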
@ -257,12 +218,11 @@ def CheckUploadSize(_, target, source, env):
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print(output)
|
||||
|
||||
# raise error
|
||||
# if data_max_size and data_size > data_max_size:
|
||||
# sys.stderr.write(
|
||||
# "Error: The data size (%d bytes) is greater "
|
||||
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
|
||||
# env.Exit(1)
|
||||
if data_max_size and data_size > data_max_size:
|
||||
sys.stderr.write(
|
||||
"Warning! The data size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (data_size, data_max_size)
|
||||
)
|
||||
if program_size > program_max_size:
|
||||
sys.stderr.write(
|
||||
"Error: The program size (%d bytes) is greater "
|
||||
|
@ -23,7 +23,7 @@ from platformio.package.lockfile import LockFile
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
class ContentCache(object):
|
||||
class ContentCache:
|
||||
def __init__(self, namespace=None):
|
||||
self.cache_dir = os.path.join(get_project_cache_dir(), namespace or "content")
|
||||
self._db_path = os.path.join(self.cache_dir, "db.data")
|
||||
@ -78,9 +78,9 @@ class ContentCache(object):
|
||||
if not os.path.isdir(os.path.dirname(cache_path)):
|
||||
os.makedirs(os.path.dirname(cache_path))
|
||||
try:
|
||||
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
|
||||
with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
with open(self._db_path, "a") as fp:
|
||||
with open(self._db_path, mode="a", encoding="utf8") as fp:
|
||||
fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
|
||||
except UnicodeError:
|
||||
if os.path.isfile(cache_path):
|
||||
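
The index written above is a plain text file where each line is "<expire timestamp>=<cache file basename>". A small sketch of reading it back and keeping only the unexpired entries (the real delete() below also removes the cached files themselves and rewrites db.data under a lock):

import os
import time

def load_live_entries(db_path):
    entries = []
    if not os.path.isfile(db_path):
        return entries
    with open(db_path, encoding="utf8") as fp:
        for line in fp.readlines():
            line = line.strip()
            if "=" not in line:
                continue
            expire, basename = line.split("=", 1)
            # Keep entries whose expiration timestamp is still in the future.
            if float(expire) > time.time():
                entries.append((expire, basename))
    return entries
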
@ -92,7 +92,7 @@ class ContentCache(object):
|
||||
return self._unlock_dbindex()
|
||||
|
||||
def delete(self, keys=None):
|
||||
""" Keys=None, delete expired items """
|
||||
"""Keys=None, delete expired items"""
|
||||
if not os.path.isfile(self._db_path):
|
||||
return None
|
||||
if not keys:
|
||||
@ -102,7 +102,7 @@ class ContentCache(object):
|
||||
paths_for_delete = [self.get_cache_path(k) for k in keys]
|
||||
found = False
|
||||
newlines = []
|
||||
with open(self._db_path) as fp:
|
||||
with open(self._db_path, encoding="utf8") as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if "=" not in line:
|
||||
@ -129,7 +129,7 @@ class ContentCache(object):
|
||||
pass
|
||||
|
||||
if found and self._lock_dbindex():
|
||||
with open(self._db_path, "w") as fp:
|
||||
with open(self._db_path, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(newlines) + "\n")
|
||||
self._unlock_dbindex()
|
||||
|
||||
|
@ -15,7 +15,9 @@
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
# pylint: disable=redefined-builtin,too-many-statements

import json
import os
import shutil
from collections import Counter
from os.path import dirname, isfile
from time import time
@ -24,31 +26,27 @@ import click
from tabulate import tabulate

from platformio import app, exception, fs, util
from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools import CheckToolFactory
from platformio.compat import dump_json_to_unicode
from platformio.check.defect import DefectItem
from platformio.check.tools import CheckToolFactory
from platformio.project.config import ProjectConfig
from platformio.project.helpers import find_project_dir_above, get_project_dir


@click.command("check", short_help="Static code analysis")
@click.command("check", short_help="Static Code Analysis")
@click.option("-e", "--environment", multiple=True)
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(
exists=True, file_okay=True, dir_okay=True, writable=True, resolve_path=True
),
type=click.Path(exists=True, file_okay=True, dir_okay=True, writable=True),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
),
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
)
@click.option("--pattern", multiple=True)
@click.option("--pattern", multiple=True, hidden=True)
@click.option("-f", "--src-filters", multiple=True)
@click.option("--flags", multiple=True)
@click.option(
"--severity", multiple=True, type=click.Choice(DefectItem.SEVERITY_LABELS.values())
@ -66,6 +64,7 @@ def cli(
environment,
project_dir,
project_conf,
src_filters,
pattern,
flags,
severity,
@ -104,19 +103,43 @@ def cli(
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
)

default_patterns = [
config.get_optional_dir("src"),
config.get_optional_dir("include"),
]
default_src_filters = []
for d in (
config.get("platformio", "src_dir"),
config.get("platformio", "include_dir"),
):
try:
default_src_filters.append("+<%s>" % os.path.relpath(d))
except ValueError as exc:
# On Windows if sources are located on a different logical drive
if not json_output and not silent:
click.echo(
"Error: Project cannot be analyzed! The project folder `%s`"
" is located on a different logical drive\n" % d
)
raise exception.ReturnErrorCode(1) from exc

env_src_filters = (
src_filters
or pattern
or env_options.get(
"check_src_filters",
env_options.get("check_patterns", default_src_filters),
)
)

tool_options = dict(
verbose=verbose,
silent=silent,
patterns=pattern or env_options.get("check_patterns", default_patterns),
src_filters=env_src_filters,
flags=flags or env_options.get("check_flags"),
severity=[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity"),
severity=(
[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
if silent
else severity or config.get("env:" + envname, "check_severity")
),
skip_packages=skip_packages or env_options.get("check_skip_packages"),
platform_packages=env_options.get("platform_packages"),
)

for tool in config.get("env:" + envname, "check_tool"):
@ -132,9 +155,11 @@ def cli(

result = {"env": envname, "tool": tool, "duration": time()}
rc = ct.check(
on_defect_callback=None
if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
on_defect_callback=(
None
if (json_output or verbose)
else lambda defect: click.echo(repr(defect))
)
)

result["defects"] = ct.get_defects()
@ -163,9 +188,12 @@ def cli(
print_processing_footer(result)

if json_output:
click.echo(dump_json_to_unicode(results_to_json(results)))
click.echo(json.dumps(results_to_json(results)))
elif not silent:
print_check_summary(results)
print_check_summary(results, verbose=verbose)

# Reset custom project config
app.set_session_var("custom_project_conf", None)

command_failed = any(r.get("succeeded") is False for r in results)
if command_failed:
@ -193,7 +221,7 @@ def print_processing_header(tool, envname, envdump):
"Checking %s > %s (%s)"
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
)
terminal_width, _ = click.get_terminal_size()
terminal_width = shutil.get_terminal_size().columns
click.secho("-" * terminal_width, bold=True)


@ -214,7 +242,7 @@ def print_processing_footer(result):


def collect_component_stats(result):
components = dict()
components = {}

def _append_defect(component, defect):
if not components.get(component):
@ -249,7 +277,7 @@ def print_defects_stats(results):

severity_labels = list(DefectItem.SEVERITY_LABELS.values())
severity_labels.reverse()
tabular_data = list()
tabular_data = []
for k, v in component_stats.items():
tool_defect = [v.get(s, 0) for s in severity_labels]
tabular_data.append([k] + tool_defect)
@ -260,13 +288,13 @@ def print_defects_stats(results):
tabular_data.append(total)

headers = ["Component"]
headers.extend([l.upper() for l in severity_labels])
headers.extend([label.upper() for label in severity_labels])
headers = [click.style(h, bold=True) for h in headers]
click.echo(tabulate(tabular_data, headers=headers, numalign="center"))
click.echo()


def print_check_summary(results):
def print_check_summary(results, verbose=False):
click.echo()

tabular_data = []
@ -283,6 +311,8 @@ def print_check_summary(results):
status_str = click.style("FAILED", fg="red")
elif result.get("succeeded") is None:
status_str = "IGNORED"
if not verbose:
continue
else:
succeeded_nums += 1
status_str = click.style("PASSED", fg="green")
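
The hunk above replaces the old check_patterns defaults with src_filters entries derived from the project's src/ and include/ directories. A small sketch of that relpath conversion and the Windows edge case it guards against (paths are hypothetical):

import os

def build_default_src_filters(dirs):
    # Mirrors the loop above: each project directory becomes a "+<relative/path>" filter.
    filters = []
    for d in dirs:
        try:
            filters.append("+<%s>" % os.path.relpath(d))
        except ValueError:
            # On Windows, relpath() raises ValueError when `d` sits on a different
            # logical drive than the current working directory; the command above
            # reports this and aborts with ReturnErrorCode(1).
            raise
    return filters

# With the current directory at the project root, e.g. /work/project:
#   build_default_src_filters(["/work/project/src", "/work/project/include"])
#   -> ["+<src>", "+<include>"]
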
@ -16,14 +16,14 @@ import os

import click

from platformio.exception import PlatformioException
from platformio.project.helpers import get_project_dir

# pylint: disable=too-many-instance-attributes, redefined-builtin
# pylint: disable=too-many-arguments


class DefectItem(object):

class DefectItem:
SEVERITY_HIGH = 1
SEVERITY_MEDIUM = 2
SEVERITY_LOW = 4
@ -34,7 +34,7 @@ class DefectItem(object):
severity,
category,
message,
file="unknown",
file=None,
line=0,
column=0,
id=None,
@ -50,7 +50,7 @@ class DefectItem(object):
self.callstack = callstack
self.cwe = cwe
self.id = id
self.file = file
self.file = file or "unknown"
if file.lower().startswith(get_project_dir().lower()):
self.file = os.path.relpath(file, get_project_dir())

@ -79,14 +79,14 @@ class DefectItem(object):
for key, value in DefectItem.SEVERITY_LABELS.items():
if label == value:
return key
raise Exception("Unknown severity label -> %s" % label)
raise PlatformioException("Unknown severity label -> %s" % label)

def as_dict(self):
return {
"severity": self.SEVERITY_LABELS[self.severity],
"category": self.category,
"message": self.message,
"file": os.path.realpath(self.file),
"file": os.path.abspath(self.file),
"line": self.line,
"column": self.column,
"callstack": self.callstack,
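
The severity constants above are bit-flag style integers keyed by human-readable labels; a minimal sketch of the label-to-integer lookup shown in the last hunk (the exact label strings are an assumption, the diff only shows the integer constants):

SEVERITY_HIGH, SEVERITY_MEDIUM, SEVERITY_LOW = 1, 2, 4

# Assumed label strings; the diff only shows that SEVERITY_LABELS maps int -> str.
SEVERITY_LABELS = {SEVERITY_HIGH: "high", SEVERITY_MEDIUM: "medium", SEVERITY_LOW: "low"}

def label_to_severity(label):
    # Same reverse lookup as the hunk above, raising a plain ValueError here
    # instead of PlatformioException.
    for key, value in SEVERITY_LABELS.items():
        if label == value:
            return key
    raise ValueError("Unknown severity label -> %s" % label)

assert label_to_severity("medium") == SEVERITY_MEDIUM
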
@ -13,12 +13,12 @@
# limitations under the License.

from platformio import exception
from platformio.commands.check.tools.clangtidy import ClangtidyCheckTool
from platformio.commands.check.tools.cppcheck import CppcheckCheckTool
from platformio.commands.check.tools.pvsstudio import PvsStudioCheckTool
from platformio.check.tools.clangtidy import ClangtidyCheckTool
from platformio.check.tools.cppcheck import CppcheckCheckTool
from platformio.check.tools.pvsstudio import PvsStudioCheckTool


class CheckToolFactory(object):
class CheckToolFactory:
@staticmethod
def new(tool, project_dir, config, envname, options):
cls = None
105
platformio/commands/check/tools/base.py → platformio/check/tools/base.py
Normal file → Executable file
@ -13,20 +13,23 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from tempfile import NamedTemporaryFile
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import compat, fs, proc
|
||||
from platformio.commands.check.defect import DefectItem
|
||||
from platformio.project.helpers import load_project_ide_data
|
||||
from platformio import fs, proc
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.project.helpers import load_build_metadata
|
||||
|
||||
|
||||
class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, project_dir, config, envname, options):
|
||||
self.config = config
|
||||
self.envname = envname
|
||||
self.options = options
|
||||
self.project_dir = project_dir
|
||||
self.cc_flags = []
|
||||
self.cxx_flags = []
|
||||
self.cpp_includes = []
|
||||
@ -38,7 +41,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
self._defects = []
|
||||
self._on_defect_callback = None
|
||||
self._bad_input = False
|
||||
self._load_cpp_data(project_dir)
|
||||
self._load_cpp_data()
|
||||
|
||||
# detect all defects by default
|
||||
if not self.options.get("severity"):
|
||||
@ -53,18 +56,25 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
for s in self.options["severity"]
|
||||
]
|
||||
|
||||
def _load_cpp_data(self, project_dir):
|
||||
data = load_project_ide_data(project_dir, self.envname)
|
||||
def _load_cpp_data(self):
|
||||
data = load_build_metadata(self.project_dir, self.envname)
|
||||
if not data:
|
||||
return
|
||||
self.cc_flags = click.parser.split_arg_string(data.get("cc_flags", ""))
|
||||
self.cxx_flags = click.parser.split_arg_string(data.get("cxx_flags", ""))
|
||||
self.cc_flags = data.get("cc_flags", [])
|
||||
self.cxx_flags = data.get("cxx_flags", [])
|
||||
self.cpp_includes = self._dump_includes(data.get("includes", {}))
|
||||
self.cpp_defines = data.get("defines", [])
|
||||
self.cc_path = data.get("cc_path")
|
||||
self.cxx_path = data.get("cxx_path")
|
||||
self.toolchain_defines = self._get_toolchain_defines()
|
||||
|
||||
def get_tool_dir(self, pkg_name):
|
||||
for spec in self.options["platform_packages"] or []:
|
||||
spec = PackageSpec(spec)
|
||||
if spec.name == pkg_name:
|
||||
return get_core_package_dir(pkg_name, spec=spec)
|
||||
return get_core_package_dir(pkg_name)
|
||||
|
||||
def get_flags(self, tool):
|
||||
result = []
|
||||
flags = self.options.get("flags") or []
|
||||
@ -89,6 +99,13 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
includes_file,
|
||||
)
|
||||
result = proc.exec_command(cmd, shell=True)
|
||||
|
||||
if result["returncode"] != 0:
|
||||
click.echo("Warning: Failed to extract toolchain defines!")
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
@ -104,7 +121,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
|
||||
|
||||
def _create_tmp_file(self, data):
|
||||
with NamedTemporaryFile("w", delete=False) as fp:
|
||||
with tempfile.NamedTemporaryFile("w", delete=False) as fp:
|
||||
fp.write(data)
|
||||
self._tmp_files.append(fp.name)
|
||||
return fp.name
|
||||
@ -168,7 +185,30 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
os.remove(f)
|
||||
|
||||
@staticmethod
|
||||
def get_project_target_files(patterns):
|
||||
def is_check_successful(cmd_result):
|
||||
return cmd_result["returncode"] == 0
|
||||
|
||||
def execute_check_cmd(self, cmd):
|
||||
result = proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
|
||||
if not self.is_check_successful(result):
|
||||
click.echo(
|
||||
"\nError: Failed to execute check command! Exited with code %d."
|
||||
% result["returncode"]
|
||||
)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def get_project_target_files(project_dir, src_filters):
|
||||
c_extension = (".c",)
|
||||
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
|
||||
header_extensions = (".h", ".hh", ".hpp", ".hxx")
|
||||
@ -177,19 +217,15 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
|
||||
def _add_file(path):
|
||||
if path.endswith(header_extensions):
|
||||
result["headers"].append(os.path.realpath(path))
|
||||
result["headers"].append(os.path.abspath(path))
|
||||
elif path.endswith(c_extension):
|
||||
result["c"].append(os.path.realpath(path))
|
||||
result["c"].append(os.path.abspath(path))
|
||||
elif path.endswith(cpp_extensions):
|
||||
result["c++"].append(os.path.realpath(path))
|
||||
result["c++"].append(os.path.abspath(path))
|
||||
|
||||
for pattern in patterns:
|
||||
for item in compat.glob_recursive(pattern):
|
||||
if not os.path.isdir(item):
|
||||
_add_file(item)
|
||||
for root, _, files in os.walk(item, followlinks=True):
|
||||
for f in files:
|
||||
_add_file(os.path.join(root, f))
|
||||
src_filters = normalize_src_filters(src_filters)
|
||||
for f in fs.match_src_files(project_dir, src_filters):
|
||||
_add_file(f)
|
||||
|
||||
return result
|
||||
|
||||
@ -200,11 +236,7 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
else:
|
||||
if self.options.get("verbose"):
|
||||
@ -214,3 +246,22 @@ class CheckToolBase(object): # pylint: disable=too-many-instance-attributes
|
||||
self.clean_up()
|
||||
|
||||
return self._bad_input
|
||||
|
||||
|
||||
#
|
||||
# Helpers
|
||||
#
|
||||
|
||||
|
||||
def normalize_src_filters(src_filters):
def _normalize(src_filters):
return (
src_filters
if src_filters.startswith(("+<", "-<"))
else "+<%s>" % src_filters
)

if isinstance(src_filters, (list, tuple)):
return " ".join([_normalize(f) for f in src_filters])

return _normalize(src_filters)
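
A quick usage sketch of the normalize_src_filters() helper added above (assuming it is importable from the renamed platformio.check.tools.base module):

from platformio.check.tools.base import normalize_src_filters

# Bare paths get wrapped in "+<...>", explicit "+<...>"/"-<...>" entries pass through,
# and lists collapse into a single space-separated filter string.
assert normalize_src_filters("src") == "+<src>"
assert normalize_src_filters("-<tests/>") == "-<tests/>"
assert normalize_src_filters(["src", "-<tests/>"]) == "+<src> -<tests/>"
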
@ -15,13 +15,12 @@
import re
from os.path import join

from platformio.commands.check.defect import DefectItem
from platformio.commands.check.tools.base import CheckToolBase
from platformio.package.manager.core import get_core_package_dir
from platformio.check.defect import DefectItem
from platformio.check.tools.base import CheckToolBase


class ClangtidyCheckTool(CheckToolBase):
def tool_output_filter(self, line):
def tool_output_filter(self, line):  # pylint: disable=arguments-differ
if not self.options.get("verbose") and "[clang-diagnostic-error]" in line:
return ""

@ -34,7 +33,7 @@ class ClangtidyCheckTool(CheckToolBase):

return ""

def parse_defect(self, raw_line):
def parse_defect(self, raw_line):  # pylint: disable=arguments-differ
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", raw_line)
if not match:
return raw_line
@ -49,19 +48,29 @@ class ClangtidyCheckTool(CheckToolBase):

return DefectItem(severity, category, message, file_, line, column, defect_id)

@staticmethod
def is_check_successful(cmd_result):
# Note: Clang-Tidy returns 1 for not critical compilation errors,
# so 0 and 1 are only acceptable values
return cmd_result["returncode"] < 2

def configure_command(self):
tool_path = join(get_core_package_dir("tool-clangtidy"), "clang-tidy")
tool_path = join(self.get_tool_dir("tool-clangtidy"), "clang-tidy")

cmd = [tool_path, "--quiet"]
flags = self.get_flags("clangtidy")
if not self.is_flag_set("--checks", flags):
if not (
self.is_flag_set("--checks", flags) or self.is_flag_set("--config", flags)
):
cmd.append("--checks=*")

project_files = self.get_project_target_files(self.options["patterns"])
project_files = self.get_project_target_files(
self.project_dir, self.options["src_filters"]
)

src_files = []
for scope in project_files:
src_files.extend(project_files[scope])
for items in project_files.values():
src_files.extend(items)

cmd.extend(flags + src_files + ["--"])
cmd.extend(
@ -71,7 +80,7 @@ class ClangtidyCheckTool(CheckToolBase):
includes = []
for inc in self.cpp_includes:
if self.options.get("skip_packages") and inc.lower().startswith(
self.config.get_optional_dir("packages").lower()
self.config.get("platformio", "packages_dir").lower()
):
continue
includes.append(inc)
|
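
For reference, the regular expression in parse_defect() above decomposes a standard clang-tidy report line; a small worked example with a made-up input line:

import re

line = "src/main.cpp:42:13: warning: unused variable 'x' [clang-diagnostic-unused-variable]"
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", line)
file_, lineno, column, severity, message, defect_id = match.groups()
# file_     -> "src/main.cpp"
# lineno    -> "42", column -> "13"
# severity  -> "warning"
# message   -> "unused variable 'x' " (note the trailing space before the bracket)
# defect_id -> "clang-diagnostic-unused-variable"
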
@ -17,13 +17,13 @@ import os
|
||||
import click
|
||||
|
||||
from platformio import proc
|
||||
from platformio.commands.check.defect import DefectItem
|
||||
from platformio.commands.check.tools.base import CheckToolBase
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools.base import CheckToolBase
|
||||
|
||||
|
||||
class CppcheckCheckTool(CheckToolBase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._field_delimiter = "<&PIO&>"
|
||||
self._buffer = ""
|
||||
self.defect_fields = [
|
||||
@ -36,9 +36,8 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
"cwe",
|
||||
"id",
|
||||
]
|
||||
super(CppcheckCheckTool, self).__init__(*args, **kwargs)
|
||||
|
||||
def tool_output_filter(self, line):
|
||||
def tool_output_filter(self, line): # pylint: disable=arguments-differ
|
||||
if (
|
||||
not self.options.get("verbose")
|
||||
and "--suppress=unmatchedSuppression:" in line
|
||||
@ -50,13 +49,14 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
for msg in (
|
||||
"No C or C++ source files found",
|
||||
"unrecognized command line option",
|
||||
"there was an internal error",
|
||||
)
|
||||
):
|
||||
self._bad_input = True
|
||||
|
||||
return line
|
||||
|
||||
def parse_defect(self, raw_line):
|
||||
def parse_defect(self, raw_line): # pylint: disable=arguments-differ
|
||||
if self._field_delimiter not in raw_line:
|
||||
return None
|
||||
|
||||
@ -64,7 +64,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if any(f not in self._buffer for f in self.defect_fields):
|
||||
return None
|
||||
|
||||
args = dict()
|
||||
args = {}
|
||||
for field in self._buffer.split(self._field_delimiter):
|
||||
field = field.strip().replace('"', "")
|
||||
name, value = field.split("=", 1)
|
||||
@ -84,7 +84,7 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
if (
|
||||
args.get("file", "")
|
||||
.lower()
|
||||
.startswith(self.config.get_optional_dir("packages").lower())
|
||||
.startswith(self.config.get("platformio", "packages_dir").lower())
|
||||
):
|
||||
if args["id"] in breaking_defect_ids:
|
||||
if self.options.get("verbose"):
|
||||
@ -96,20 +96,19 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
)
|
||||
click.echo()
|
||||
self._bad_input = True
|
||||
self._buffer = ""
|
||||
return None
|
||||
|
||||
self._buffer = ""
|
||||
return DefectItem(**args)
|
||||
|
||||
def configure_command(
|
||||
self, language, src_files
|
||||
): # pylint: disable=arguments-differ
|
||||
tool_path = os.path.join(get_core_package_dir("tool-cppcheck"), "cppcheck")
|
||||
def configure_command(self, language, src_file): # pylint: disable=arguments-differ
|
||||
tool_path = os.path.join(self.get_tool_dir("tool-cppcheck"), "cppcheck")
|
||||
|
||||
cmd = [
|
||||
tool_path,
|
||||
"--addon-python=%s" % proc.get_pythonexe_path(),
|
||||
"--error-exitcode=1",
|
||||
"--error-exitcode=3",
|
||||
"--verbose" if self.options.get("verbose") else "--quiet",
|
||||
]
|
||||
|
||||
@ -142,10 +141,11 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
|
||||
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
|
||||
|
||||
for flag in build_flags:
|
||||
if "-std" in flag:
|
||||
# Standards with GNU extensions are not allowed
|
||||
cmd.append("-" + flag.replace("gnu", "c"))
|
||||
if not self.is_flag_set("--std", flags):
|
||||
# Try to guess the standard version from the build flags
|
||||
for flag in build_flags:
|
||||
if "-std" in flag:
|
||||
cmd.append("-" + self.convert_language_standard(flag))
|
||||
|
||||
cmd.extend(
|
||||
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
|
||||
@ -157,8 +157,8 @@ class CppcheckCheckTool(CheckToolBase):
|
||||
"--include=" + inc
|
||||
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
|
||||
)
|
||||
cmd.append("--file-list=%s" % self._generate_src_file(src_files))
|
||||
cmd.append("--includes-file=%s" % self._generate_inc_file())
|
||||
cmd.append('"%s"' % src_file)
|
||||
|
||||
return cmd
|
||||
|
||||
@ -201,48 +201,70 @@ class CppcheckCheckTool(CheckToolBase):
result = []
for inc in self.cpp_includes:
if self.options.get("skip_packages") and inc.lower().startswith(
self.config.get_optional_dir("packages").lower()
self.config.get("platformio", "packages_dir").lower()
):
continue
result.append(inc)
return self._create_tmp_file("\n".join(result))

def clean_up(self):
super(CppcheckCheckTool, self).clean_up()
super().clean_up()

# delete temporary dump files generated by addons
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
return

for files in self.get_project_target_files(self.options["patterns"]).values():
for files in self.get_project_target_files(
self.project_dir, self.options["src_filters"]
).values():
for f in files:
dump_file = f + ".dump"
if os.path.isfile(dump_file):
os.remove(dump_file)

@staticmethod
def is_check_successful(cmd_result):
# Cppcheck is configured to return '3' if a defect is found
return cmd_result["returncode"] in (0, 3)

@staticmethod
def convert_language_standard(flag):
cpp_standards_map = {
"0x": "11",
"1y": "14",
"1z": "17",
"2a": "20",
}

standard = flag[-2:]
# Note: GNU extensions are not supported and converted to regular standards
return flag.replace("gnu", "c").replace(
standard, cpp_standards_map.get(standard, standard)
)

def check(self, on_defect_callback=None):
self._on_defect_callback = on_defect_callback
project_files = self.get_project_target_files(self.options["patterns"])

languages = ("c", "c++")
if not any([project_files[t] for t in languages]):
project_files = self.get_project_target_files(
self.project_dir, self.options["src_filters"]
)
src_files_scope = ("c", "c++")
if not any(project_files[t] for t in src_files_scope):
click.echo("Error: Nothing to check.")
return True
for language in languages:
if not project_files[language]:
continue
cmd = self.configure_command(language, project_files[language])
if not cmd:
self._bad_input = True
continue
if self.options.get("verbose"):
click.echo(" ".join(cmd))

proc.exec_command(
cmd,
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
)
for scope, files in project_files.items():
if scope not in src_files_scope:
continue
for src_file in files:
cmd = self.configure_command(scope, src_file)
if not cmd:
self._bad_input = True
continue
if self.options.get("verbose"):
click.echo(" ".join(cmd))

self.execute_check_cmd(cmd)

self.clean_up()
|
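
The new convert_language_standard() above rewrites GNU dialect flags from the build metadata into the plain standards cppcheck accepts; an illustrative round-trip (the function body is copied from the hunk so the example is runnable):

def convert_language_standard(flag):
    cpp_standards_map = {"0x": "11", "1y": "14", "1z": "17", "2a": "20"}
    standard = flag[-2:]
    # GNU extensions are not supported by cppcheck, so gnu++1z becomes c++17, etc.
    return flag.replace("gnu", "c").replace(
        standard, cpp_standards_map.get(standard, standard)
    )

assert convert_language_standard("-std=gnu++1z") == "-std=c++17"
assert convert_language_standard("-std=gnu11") == "-std=c11"
# configure_command() then prefixes one more dash, yielding cppcheck's "--std=c++17" form.
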
@ -19,39 +19,50 @@ from xml.etree.ElementTree import fromstring
|
||||
|
||||
import click
|
||||
|
||||
from platformio import proc, util
|
||||
from platformio.commands.check.defect import DefectItem
|
||||
from platformio.commands.check.tools.base import CheckToolBase
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio import proc
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools.base import CheckToolBase
|
||||
from platformio.compat import IS_WINDOWS
|
||||
|
||||
|
||||
class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._tmp_dir = tempfile.mkdtemp(prefix="piocheck")
|
||||
self._tmp_preprocessed_file = self._generate_tmp_file_path() + ".i"
|
||||
self._tmp_output_file = self._generate_tmp_file_path() + ".pvs"
|
||||
self._tmp_cfg_file = self._generate_tmp_file_path() + ".cfg"
|
||||
self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd"
|
||||
self.tool_path = os.path.join(
|
||||
get_core_package_dir("tool-pvs-studio"),
|
||||
"x64" if "windows" in util.get_systype() else "bin",
|
||||
self.get_tool_dir("tool-pvs-studio"),
|
||||
"x64" if IS_WINDOWS else "bin",
|
||||
"pvs-studio",
|
||||
)
|
||||
super(PvsStudioCheckTool, self).__init__(*args, **kwargs)
|
||||
|
||||
with open(self._tmp_cfg_file, "w") as fp:
|
||||
with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
"exclude-path = "
|
||||
+ self.config.get_optional_dir("packages").replace("\\", "/")
|
||||
+ self.config.get("platformio", "packages_dir").replace("\\", "/")
|
||||
)
|
||||
|
||||
with open(self._tmp_cmd_file, "w") as fp:
|
||||
with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
" ".join(
|
||||
['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]
|
||||
)
|
||||
)
|
||||
|
||||
def tool_output_filter(self, line): # pylint: disable=arguments-differ
|
||||
if any(
|
||||
err_msg in line.lower()
|
||||
for err_msg in (
|
||||
"license was not entered",
|
||||
"license information is incorrect",
|
||||
)
|
||||
):
|
||||
self._bad_input = True
|
||||
return line
|
||||
|
||||
def _process_defects(self, defects):
|
||||
for defect in defects:
|
||||
if not isinstance(defect, DefectItem):
|
||||
@ -64,10 +75,8 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
|
||||
def _demangle_report(self, output_file):
|
||||
converter_tool = os.path.join(
|
||||
get_core_package_dir("tool-pvs-studio"),
|
||||
"HtmlGenerator"
|
||||
if "windows" in util.get_systype()
|
||||
else os.path.join("bin", "plog-converter"),
|
||||
self.get_tool_dir("tool-pvs-studio"),
|
||||
"HtmlGenerator" if IS_WINDOWS else os.path.join("bin", "plog-converter"),
|
||||
)
|
||||
|
||||
cmd = (
|
||||
@ -182,9 +191,15 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
flags = self.cc_flags
|
||||
compiler = self.cc_path
|
||||
|
||||
cmd = [compiler, src_file, "-E", "-o", self._tmp_preprocessed_file]
|
||||
cmd = [
|
||||
compiler,
|
||||
'"%s"' % src_file,
|
||||
"-E",
|
||||
"-o",
|
||||
'"%s"' % self._tmp_preprocessed_file,
|
||||
]
|
||||
cmd.extend([f for f in flags if f])
|
||||
cmd.extend(["-D%s" % d for d in self.cpp_defines])
|
||||
cmd.extend(['"-D%s"' % d.replace('"', '\\"') for d in self.cpp_defines])
|
||||
cmd.append('@"%s"' % self._tmp_cmd_file)
|
||||
|
||||
# Explicitly specify C++ as the language used in .ino files
|
||||
@ -199,14 +214,20 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
self._bad_input = True
|
||||
|
||||
def clean_up(self):
|
||||
super(PvsStudioCheckTool, self).clean_up()
|
||||
super().clean_up()
|
||||
if os.path.isdir(self._tmp_dir):
|
||||
shutil.rmtree(self._tmp_dir)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return (
|
||||
"license" not in cmd_result["err"].lower() and cmd_result["returncode"] == 0
|
||||
)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
for scope, files in self.get_project_target_files(
|
||||
self.options["patterns"]
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).items():
|
||||
if scope not in ("c", "c++"):
|
||||
continue
|
||||
@ -219,11 +240,8 @@ class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-at
|
||||
self._bad_input = True
|
||||
continue
|
||||
|
||||
result = proc.exec_command(cmd)
|
||||
# pylint: disable=unsupported-membership-test
|
||||
if result["returncode"] != 0 or "license" in result["err"].lower():
|
||||
self._bad_input = True
|
||||
click.echo(result["err"])
|
||||
result = self.execute_check_cmd(cmd)
|
||||
if result["returncode"] != 0:
|
||||
continue
|
||||
|
||||
self._process_defects(self.parse_defects(self._tmp_output_file))
|
110
platformio/cli.py
Normal file
@ -0,0 +1,110 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import importlib
from pathlib import Path

import click


class PlatformioCLI(click.MultiCommand):
leftover_args = []

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._pio_root_path = Path(__file__).parent
self._pio_cmd_aliases = dict(package="pkg")

def _find_pio_commands(self):
def _to_module_path(p):
return (
"platformio." + ".".join(p.relative_to(self._pio_root_path).parts)[:-3]
)

result = {}
for p in self._pio_root_path.rglob("cli.py"):
# skip this module
if p.parent == self._pio_root_path:
continue
cmd_name = p.parent.name
result[self._pio_cmd_aliases.get(cmd_name, cmd_name)] = _to_module_path(p)

# find legacy commands
for p in (self._pio_root_path / "commands").iterdir():
if p.name.startswith("_"):
continue
if (p / "command.py").is_file():
result[p.name] = _to_module_path(p / "command.py")
elif p.name.endswith(".py"):
result[p.name[:-3]] = _to_module_path(p)

return result

@staticmethod
def in_silence():
args = PlatformioCLI.leftover_args
return args and any(
[
args[0] == "debug" and "--interpreter" in " ".join(args),
args[0] == "upgrade",
"--json-output" in args,
"--version" in args,
]
)

@classmethod
def reveal_cmd_path_args(cls, ctx):
result = []
group = ctx.command
args = cls.leftover_args[::]
while args:
cmd_name = args.pop(0)
next_group = group.get_command(ctx, cmd_name)
if next_group:
group = next_group
result.append(cmd_name)
if not hasattr(group, "get_command"):
break
return result

def invoke(self, ctx):
PlatformioCLI.leftover_args = ctx.args
if hasattr(ctx, "protected_args"):
PlatformioCLI.leftover_args = ctx.protected_args + ctx.args
return super().invoke(ctx)

def list_commands(self, ctx):
return sorted(list(self._find_pio_commands()))

def get_command(self, ctx, cmd_name):
commands = self._find_pio_commands()
if cmd_name not in commands:
return self._handle_obsolate_command(ctx, cmd_name)
module = importlib.import_module(commands[cmd_name])
return getattr(module, "cli")

@staticmethod
def _handle_obsolate_command(ctx, cmd_name):
# pylint: disable=import-outside-toplevel
if cmd_name == "init":
from platformio.project.commands.init import project_init_cmd

return project_init_cmd

if cmd_name == "package":
from platformio.package.cli import cli

return cli

raise click.UsageError('No such command "%s"' % cmd_name, ctx)
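
The command discovery above turns every nested cli.py into an importable module path; a short sketch of that mapping (the filesystem location is hypothetical):

from pathlib import Path

root = Path("/usr/lib/python3/site-packages/platformio")
p = root / "check" / "cli.py"

module_path = "platformio." + ".".join(p.relative_to(root).parts)[:-3]
# -> "platformio.check.cli"; the command name comes from the parent folder ("check"),
# with aliases applied afterwards (e.g. the "package" folder is exposed as "pkg").
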
@ -1,205 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import math
|
||||
import os
|
||||
import socket
|
||||
|
||||
import requests.adapters
|
||||
from requests.packages.urllib3.util.retry import Retry # pylint:disable=import-error
|
||||
|
||||
from platformio import __check_internet_hosts__, __default_requests_timeout__, app, util
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
|
||||
try:
|
||||
from urllib.parse import urljoin
|
||||
except ImportError:
|
||||
from urlparse import urljoin
|
||||
|
||||
|
||||
class HTTPClientError(PlatformioException):
|
||||
def __init__(self, message, response=None):
|
||||
super(HTTPClientError, self).__init__()
|
||||
self.message = message
|
||||
self.response = response
|
||||
|
||||
def __str__(self): # pragma: no cover
|
||||
return self.message
|
||||
|
||||
|
||||
class InternetIsOffline(UserSideException):
|
||||
|
||||
MESSAGE = (
|
||||
"You are not connected to the Internet.\n"
|
||||
"PlatformIO needs the Internet connection to"
|
||||
" download dependent packages or to work with PlatformIO Account."
|
||||
)
|
||||
|
||||
|
||||
class EndpointSession(requests.Session):
|
||||
def __init__(self, base_url, *args, **kwargs):
|
||||
super(EndpointSession, self).__init__(*args, **kwargs)
|
||||
self.base_url = base_url
|
||||
|
||||
def request( # pylint: disable=signature-differs,arguments-differ
|
||||
self, method, url, *args, **kwargs
|
||||
):
|
||||
# print(self.base_url, method, url, args, kwargs)
|
||||
return super(EndpointSession, self).request(
|
||||
method, urljoin(self.base_url, url), *args, **kwargs
|
||||
)
|
||||
|
||||
|
||||
class EndpointSessionIterator(object):
|
||||
def __init__(self, endpoints):
|
||||
if not isinstance(endpoints, list):
|
||||
endpoints = [endpoints]
|
||||
self.endpoints = endpoints
|
||||
self.endpoints_iter = iter(endpoints)
|
||||
self.retry = Retry(
|
||||
total=math.ceil(6 / len(self.endpoints)),
|
||||
backoff_factor=1,
|
||||
# method_whitelist=list(Retry.DEFAULT_METHOD_WHITELIST) + ["POST"],
|
||||
status_forcelist=[413, 429, 500, 502, 503, 504],
|
||||
)
|
||||
|
||||
def __iter__(self): # pylint: disable=non-iterator-returned
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
""" For Python 2 compatibility """
|
||||
return self.__next__()
|
||||
|
||||
def __next__(self):
|
||||
base_url = next(self.endpoints_iter)
|
||||
session = EndpointSession(base_url)
|
||||
session.headers.update({"User-Agent": app.get_user_agent()})
|
||||
adapter = requests.adapters.HTTPAdapter(max_retries=self.retry)
|
||||
session.mount(base_url, adapter)
|
||||
return session
|
||||
|
||||
|
||||
class HTTPClient(object):
|
||||
def __init__(self, endpoints):
|
||||
self._session_iter = EndpointSessionIterator(endpoints)
|
||||
self._session = None
|
||||
self._next_session()
|
||||
|
||||
def __del__(self):
|
||||
if not self._session:
|
||||
return
|
||||
self._session.close()
|
||||
self._session = None
|
||||
|
||||
def _next_session(self):
|
||||
if self._session:
|
||||
self._session.close()
|
||||
self._session = next(self._session_iter)
|
||||
|
||||
@util.throttle(500)
|
||||
def send_request(self, method, path, **kwargs):
|
||||
# check Internet before and resolve issue with 60 seconds timeout
|
||||
ensure_internet_on(raise_exception=True)
|
||||
|
||||
# set default timeout
|
||||
if "timeout" not in kwargs:
|
||||
kwargs["timeout"] = __default_requests_timeout__
|
||||
|
||||
while True:
|
||||
try:
|
||||
return getattr(self._session, method)(path, **kwargs)
|
||||
except (
|
||||
requests.exceptions.ConnectionError,
|
||||
requests.exceptions.Timeout,
|
||||
) as e:
|
||||
try:
|
||||
self._next_session()
|
||||
except: # pylint: disable=bare-except
|
||||
raise HTTPClientError(str(e))
|
||||
|
||||
def fetch_json_data(self, method, path, **kwargs):
|
||||
cache_valid = kwargs.pop("cache_valid") if "cache_valid" in kwargs else None
|
||||
if not cache_valid:
|
||||
return self._parse_json_response(self.send_request(method, path, **kwargs))
|
||||
cache_key = ContentCache.key_from_args(
|
||||
method, path, kwargs.get("params"), kwargs.get("data")
|
||||
)
|
||||
with ContentCache("http") as cc:
|
||||
result = cc.get(cache_key)
|
||||
if result is not None:
|
||||
return json.loads(result)
|
||||
response = self.send_request(method, path, **kwargs)
|
||||
data = self._parse_json_response(response)
|
||||
cc.set(cache_key, response.text, cache_valid)
|
||||
return data
|
||||
|
||||
@staticmethod
|
||||
def _parse_json_response(response, expected_codes=(200, 201, 202)):
|
||||
if response.status_code in expected_codes:
|
||||
try:
|
||||
return response.json()
|
||||
except ValueError:
|
||||
pass
|
||||
try:
|
||||
message = response.json()["message"]
|
||||
except (KeyError, ValueError):
|
||||
message = response.text
|
||||
raise HTTPClientError(message, response)
|
||||
|
||||
|
||||
#
|
||||
# Helpers
|
||||
#
|
||||
|
||||
|
||||
@util.memoized(expire="10s")
|
||||
def _internet_on():
|
||||
timeout = 2
|
||||
socket.setdefaulttimeout(timeout)
|
||||
for host in __check_internet_hosts__:
|
||||
try:
|
||||
for var in ("HTTP_PROXY", "HTTPS_PROXY"):
|
||||
if not os.getenv(var) and not os.getenv(var.lower()):
|
||||
continue
|
||||
requests.get("http://%s" % host, allow_redirects=False, timeout=timeout)
|
||||
return True
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
s.connect((host, 80))
|
||||
s.close()
|
||||
return True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def ensure_internet_on(raise_exception=False):
|
||||
result = _internet_on()
|
||||
if raise_exception and not result:
|
||||
raise InternetIsOffline()
|
||||
return result
|
||||
|
||||
|
||||
def fetch_remote_content(*args, **kwargs):
|
||||
kwargs["headers"] = kwargs.get("headers", {})
|
||||
if "User-Agent" not in kwargs["headers"]:
|
||||
kwargs["headers"]["User-Agent"] = app.get_user_agent()
|
||||
|
||||
if "timeout" not in kwargs:
|
||||
kwargs["timeout"] = __default_requests_timeout__
|
||||
|
||||
r = requests.get(*args, **kwargs)
|
||||
r.raise_for_status()
|
||||
return r.text
|
@ -1,147 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio import __registry_api__, fs
|
||||
from platformio.clients.account import AccountClient
|
||||
from platformio.clients.http import HTTPClient, HTTPClientError
|
||||
from platformio.package.meta import PackageType
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
|
||||
|
||||
class RegistryClient(HTTPClient):
|
||||
def __init__(self):
|
||||
super(RegistryClient, self).__init__(__registry_api__)
|
||||
|
||||
def send_auth_request(self, *args, **kwargs):
|
||||
headers = kwargs.get("headers", {})
|
||||
if "Authorization" not in headers:
|
||||
token = AccountClient().fetch_authentication_token()
|
||||
headers["Authorization"] = "Bearer %s" % token
|
||||
kwargs["headers"] = headers
|
||||
return self.fetch_json_data(*args, **kwargs)
|
||||
|
||||
def publish_package(
|
||||
self, archive_path, owner=None, released_at=None, private=False, notify=True
|
||||
):
|
||||
account = AccountClient()
|
||||
if not owner:
|
||||
owner = (
|
||||
account.get_account_info(offline=True).get("profile").get("username")
|
||||
)
|
||||
with open(archive_path, "rb") as fp:
|
||||
return self.send_auth_request(
|
||||
"post",
|
||||
"/v3/packages/%s/%s" % (owner, PackageType.from_archive(archive_path)),
|
||||
params={
|
||||
"private": 1 if private else 0,
|
||||
"notify": 1 if notify else 0,
|
||||
"released_at": released_at,
|
||||
},
|
||||
headers={
|
||||
"Content-Type": "application/octet-stream",
|
||||
"X-PIO-Content-SHA256": fs.calculate_file_hashsum(
|
||||
"sha256", archive_path
|
||||
),
|
||||
},
|
||||
data=fp,
|
||||
)
|
||||
|
||||
def unpublish_package( # pylint: disable=redefined-builtin
|
||||
self, type, name, owner=None, version=None, undo=False
|
||||
):
|
||||
account = AccountClient()
|
||||
if not owner:
|
||||
owner = (
|
||||
account.get_account_info(offline=True).get("profile").get("username")
|
||||
)
|
||||
path = "/v3/packages/%s/%s/%s" % (owner, type, name)
|
||||
if version:
|
||||
path += "/" + version
|
||||
return self.send_auth_request(
|
||||
"delete",
|
||||
path,
|
||||
params={"undo": 1 if undo else 0},
|
||||
)
|
||||
|
||||
def update_resource(self, urn, private):
|
||||
return self.send_auth_request(
|
||||
"put",
|
||||
"/v3/resources/%s" % urn,
|
||||
data={"private": int(private)},
|
||||
)
|
||||
|
||||
def grant_access_for_resource(self, urn, client, level):
|
||||
return self.send_auth_request(
|
||||
"put",
|
||||
"/v3/resources/%s/access" % urn,
|
||||
data={"client": client, "level": level},
|
||||
)
|
||||
|
||||
def revoke_access_from_resource(self, urn, client):
|
||||
return self.send_auth_request(
|
||||
"delete",
|
||||
"/v3/resources/%s/access" % urn,
|
||||
data={"client": client},
|
||||
)
|
||||
|
||||
def list_resources(self, owner):
|
||||
return self.send_auth_request(
|
||||
"get", "/v3/resources", params={"owner": owner} if owner else None
|
||||
)
|
||||
|
||||
def list_packages(self, query=None, filters=None, page=None):
|
||||
assert query or filters
|
||||
search_query = []
|
||||
if filters:
|
||||
valid_filters = (
|
||||
"authors",
|
||||
"keywords",
|
||||
"frameworks",
|
||||
"platforms",
|
||||
"headers",
|
||||
"ids",
|
||||
"names",
|
||||
"owners",
|
||||
"types",
|
||||
)
|
||||
assert set(filters.keys()) <= set(valid_filters)
|
||||
for name, values in filters.items():
|
||||
for value in set(
|
||||
values if isinstance(values, (list, tuple)) else [values]
|
||||
):
|
||||
search_query.append('%s:"%s"' % (name[:-1], value))
|
||||
if query:
|
||||
search_query.append(query)
|
||||
params = dict(query=" ".join(search_query))
|
||||
if page:
|
||||
params["page"] = int(page)
|
||||
return self.fetch_json_data(
|
||||
"get", "/v3/packages", params=params, cache_valid="1h"
|
||||
)
|
||||
|
||||
def get_package(self, type_, owner, name, version=None):
|
||||
try:
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v3/packages/{owner}/{type}/{name}".format(
|
||||
type=type_, owner=owner.lower(), name=name.lower()
|
||||
),
|
||||
params=dict(version=version) if version else None,
|
||||
cache_valid="1h",
|
||||
)
|
||||
except HTTPClientError as e:
|
||||
if e.response is not None and e.response.status_code == 404:
|
||||
return None
|
||||
raise e
|
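
As a side note, list_packages() in the removed RegistryClient above folds the filter dict and the free-text query into one search string; a small illustrative sketch of that construction (filter values are made up):

filters = {"types": ["library"], "owners": "platformio"}
query = "neopixel"

search_query = []
for name, values in filters.items():
    for value in set(values if isinstance(values, (list, tuple)) else [values]):
        # Singularize the filter key ("types" -> "type") and quote the value.
        search_query.append('%s:"%s"' % (name[:-1], value))
search_query.append(query)

print(" ".join(search_query))
# -> type:"library" owner:"platformio" neopixel
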
@ -11,70 +11,3 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
|
||||
class PlatformioCLI(click.MultiCommand):
|
||||
|
||||
leftover_args = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(PlatformioCLI, self).__init__(*args, **kwargs)
|
||||
self._pio_cmds_dir = os.path.dirname(__file__)
|
||||
|
||||
@staticmethod
|
||||
def in_silence():
|
||||
args = PlatformioCLI.leftover_args
|
||||
return args and any(
|
||||
[
|
||||
args[0] == "debug" and "--interpreter" in " ".join(args),
|
||||
args[0] == "upgrade",
|
||||
"--json-output" in args,
|
||||
"--version" in args,
|
||||
]
|
||||
)
|
||||
|
||||
def invoke(self, ctx):
|
||||
PlatformioCLI.leftover_args = ctx.args
|
||||
if hasattr(ctx, "protected_args"):
|
||||
PlatformioCLI.leftover_args = ctx.protected_args + ctx.args
|
||||
return super(PlatformioCLI, self).invoke(ctx)
|
||||
|
||||
def list_commands(self, ctx):
|
||||
cmds = []
|
||||
for cmd_name in os.listdir(self._pio_cmds_dir):
|
||||
if cmd_name.startswith("__init__"):
|
||||
continue
|
||||
if os.path.isfile(os.path.join(self._pio_cmds_dir, cmd_name, "command.py")):
|
||||
cmds.append(cmd_name)
|
||||
elif cmd_name.endswith(".py"):
|
||||
cmds.append(cmd_name[:-3])
|
||||
cmds.sort()
|
||||
return cmds
|
||||
|
||||
def get_command(self, ctx, cmd_name):
|
||||
mod = None
|
||||
try:
|
||||
mod_path = "platformio.commands." + cmd_name
|
||||
if os.path.isfile(os.path.join(self._pio_cmds_dir, cmd_name, "command.py")):
|
||||
mod_path = "platformio.commands.%s.command" % cmd_name
|
||||
mod = __import__(mod_path, None, None, ["cli"])
|
||||
except ImportError:
|
||||
try:
|
||||
return self._handle_obsolate_command(cmd_name)
|
||||
except AttributeError:
|
||||
pass
|
||||
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
|
||||
return mod.cli
|
||||
|
||||
@staticmethod
|
||||
def _handle_obsolate_command(name):
|
||||
# pylint: disable=import-outside-toplevel
|
||||
if name == "init":
|
||||
from platformio.commands.project import project_init
|
||||
|
||||
return project_init
|
||||
raise AttributeError()
|
||||
|
@ -1,146 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import json
|
||||
import re
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio.clients.registry import RegistryClient
|
||||
from platformio.commands.account import validate_username
|
||||
from platformio.commands.team import validate_orgname_teamname
|
||||
|
||||
|
||||
def validate_client(value):
|
||||
if ":" in value:
|
||||
validate_orgname_teamname(value)
|
||||
else:
|
||||
validate_username(value)
|
||||
return value
|
||||
|
||||
|
||||
@click.group("access", short_help="Manage resource access")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
def validate_urn(value):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^prn:reg:pkg:(\d+):(\w+)$", value, flags=re.I):
|
||||
raise click.BadParameter("Invalid URN format.")
|
||||
return value
|
||||
|
||||
|
||||
@cli.command("public", short_help="Make resource public")
|
||||
@click.argument(
|
||||
"urn",
|
||||
callback=lambda _, __, value: validate_urn(value),
|
||||
)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
def access_public(urn, urn_type):
|
||||
client = RegistryClient()
|
||||
client.update_resource(urn=urn, private=0)
|
||||
return click.secho(
|
||||
"The resource %s has been successfully updated." % urn,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("private", short_help="Make resource private")
|
||||
@click.argument(
|
||||
"urn",
|
||||
callback=lambda _, __, value: validate_urn(value),
|
||||
)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
def access_private(urn, urn_type):
|
||||
client = RegistryClient()
|
||||
client.update_resource(urn=urn, private=1)
|
||||
return click.secho(
|
||||
"The resource %s has been successfully updated." % urn,
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("grant", short_help="Grant access")
|
||||
@click.argument("level", type=click.Choice(["admin", "maintainer", "guest"]))
|
||||
@click.argument(
|
||||
"client",
|
||||
metavar="[<ORGNAME:TEAMNAME>|<USERNAME>]",
|
||||
callback=lambda _, __, value: validate_client(value),
|
||||
)
|
||||
@click.argument(
|
||||
"urn",
|
||||
callback=lambda _, __, value: validate_urn(value),
|
||||
)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
def access_grant(level, client, urn, urn_type):
|
||||
reg_client = RegistryClient()
|
||||
reg_client.grant_access_for_resource(urn=urn, client=client, level=level)
|
||||
return click.secho(
|
||||
"Access for resource %s has been granted for %s" % (urn, client),
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("revoke", short_help="Revoke access")
|
||||
@click.argument(
|
||||
"client",
|
||||
metavar="[ORGNAME:TEAMNAME|USERNAME]",
|
||||
callback=lambda _, __, value: validate_client(value),
|
||||
)
|
||||
@click.argument(
|
||||
"urn",
|
||||
callback=lambda _, __, value: validate_urn(value),
|
||||
)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
def access_revoke(client, urn, urn_type):
|
||||
reg_client = RegistryClient()
|
||||
reg_client.revoke_access_from_resource(urn=urn, client=client)
|
||||
return click.secho(
|
||||
"Access for resource %s has been revoked for %s" % (urn, client),
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("list", short_help="List published resources")
|
||||
@click.argument("owner", required=False)
|
||||
@click.option("--urn-type", type=click.Choice(["prn:reg:pkg"]), default="prn:reg:pkg")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def access_list(owner, urn_type, json_output):
|
||||
reg_client = RegistryClient()
|
||||
resources = reg_client.list_resources(owner=owner)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(resources))
|
||||
if not resources:
|
||||
return click.secho("You do not have any resources.", fg="yellow")
|
||||
for resource in resources:
|
||||
click.echo()
|
||||
click.secho(resource.get("name"), fg="cyan")
|
||||
click.echo("-" * len(resource.get("name")))
|
||||
table_data = []
|
||||
table_data.append(("URN:", resource.get("urn")))
|
||||
table_data.append(("Owner:", resource.get("owner")))
|
||||
table_data.append(
|
||||
(
|
||||
"Access level(s):",
|
||||
", ".join(
|
||||
(level.capitalize() for level in resource.get("access_levels"))
|
||||
),
|
||||
)
|
||||
)
|
||||
click.echo(tabulate(table_data, tablefmt="plain"))
|
||||
return click.echo()
|
@ -1,299 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import re
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio.clients.account import AccountClient, AccountNotAuthorized
|
||||
|
||||
|
||||
@click.group("account", short_help="Manage PlatformIO account")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
def validate_username(value, field="username"):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I):
|
||||
raise click.BadParameter(
|
||||
"Invalid %s format. "
|
||||
"%s must contain only alphanumeric characters "
|
||||
"or single hyphens, cannot begin or end with a hyphen, "
|
||||
"and must not be longer than 38 characters."
|
||||
% (field.lower(), field.capitalize())
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
def validate_email(value):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^[a-z\d_.+-]+@[a-z\d\-]+\.[a-z\d\-.]+$", value, flags=re.I):
|
||||
raise click.BadParameter("Invalid email address")
|
||||
return value
|
||||
|
||||
|
||||
def validate_password(value):
|
||||
value = str(value).strip()
|
||||
if not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
|
||||
raise click.BadParameter(
|
||||
"Invalid password format. "
|
||||
"Password must contain at least 8 characters"
|
||||
" including a number and a lowercase letter"
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
@cli.command("register", short_help="Create new PlatformIO Account")
|
||||
@click.option(
|
||||
"-u",
|
||||
"--username",
|
||||
prompt=True,
|
||||
callback=lambda _, __, value: validate_username(value),
|
||||
)
|
||||
@click.option(
|
||||
"-e", "--email", prompt=True, callback=lambda _, __, value: validate_email(value)
|
||||
)
|
||||
@click.option(
|
||||
"-p",
|
||||
"--password",
|
||||
prompt=True,
|
||||
hide_input=True,
|
||||
confirmation_prompt=True,
|
||||
callback=lambda _, __, value: validate_password(value),
|
||||
)
|
||||
@click.option("--firstname", prompt=True)
|
||||
@click.option("--lastname", prompt=True)
|
||||
def account_register(username, email, password, firstname, lastname):
|
||||
client = AccountClient()
|
||||
client.registration(username, email, password, firstname, lastname)
|
||||
return click.secho(
|
||||
"An account has been successfully created. "
|
||||
"Please check your mail to activate your account and verify your email address.",
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("login", short_help="Log in to PlatformIO Account")
|
||||
@click.option("-u", "--username", prompt="Username or email")
|
||||
@click.option("-p", "--password", prompt=True, hide_input=True)
|
||||
def account_login(username, password):
|
||||
client = AccountClient()
|
||||
client.login(username, password)
|
||||
return click.secho("Successfully logged in!", fg="green")
|
||||
|
||||
|
||||
@cli.command("logout", short_help="Log out of PlatformIO Account")
|
||||
def account_logout():
|
||||
client = AccountClient()
|
||||
client.logout()
|
||||
return click.secho("Successfully logged out!", fg="green")
|
||||
|
||||
|
||||
@cli.command("password", short_help="Change password")
|
||||
@click.option("--old-password", prompt=True, hide_input=True)
|
||||
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
|
||||
def account_password(old_password, new_password):
|
||||
client = AccountClient()
|
||||
client.change_password(old_password, new_password)
|
||||
return click.secho("Password successfully changed!", fg="green")
|
||||
|
||||
|
||||
@cli.command("token", short_help="Get or regenerate Authentication Token")
|
||||
@click.option("-p", "--password", prompt=True, hide_input=True)
|
||||
@click.option("--regenerate", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_token(password, regenerate, json_output):
|
||||
client = AccountClient()
|
||||
auth_token = client.auth_token(password, regenerate)
|
||||
if json_output:
|
||||
return click.echo(json.dumps({"status": "success", "result": auth_token}))
|
||||
return click.secho("Personal Authentication Token: %s" % auth_token, fg="green")
|
||||
|
||||
|
||||
@cli.command("forgot", short_help="Forgot password")
|
||||
@click.option("--username", prompt="Username or email")
|
||||
def account_forgot(username):
|
||||
client = AccountClient()
|
||||
client.forgot_password(username)
|
||||
return click.secho(
|
||||
"If this account is registered, we will send the "
|
||||
"further instructions to your email.",
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
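
Every subcommand above is an ordinary Click command attached to the `cli` group, so the whole flow can also be driven programmatically. A sketch using Click's test runner, with placeholder credentials:

# Placeholder credentials; a sketch of driving the "account" group via Click's test runner.
from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(
    cli, ["login", "--username", "user@example.com", "--password", "s3cretpass"]
)
print(result.exit_code, result.output)

# Prompted values (e.g. the hidden password for "token") can be fed through "input":
result = runner.invoke(cli, ["token", "--json-output"], input="s3cretpass\n")
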
@cli.command("update", short_help="Update profile information")
|
||||
@click.option("--current-password", prompt=True, hide_input=True)
|
||||
@click.option("--username")
|
||||
@click.option("--email")
|
||||
@click.option("--firstname")
|
||||
@click.option("--lastname")
|
||||
def account_update(current_password, **kwargs):
|
||||
client = AccountClient()
|
||||
profile = client.get_profile()
|
||||
new_profile = profile.copy()
|
||||
if not any(kwargs.values()):
|
||||
for field in profile:
|
||||
new_profile[field] = click.prompt(
|
||||
field.replace("_", " ").capitalize(), default=profile[field]
|
||||
)
|
||||
if field == "email":
|
||||
validate_email(new_profile[field])
|
||||
if field == "username":
|
||||
validate_username(new_profile[field])
|
||||
else:
|
||||
new_profile.update({key: value for key, value in kwargs.items() if value})
|
||||
client.update_profile(new_profile, current_password)
|
||||
click.secho("Profile successfully updated!", fg="green")
|
||||
username_changed = new_profile["username"] != profile["username"]
|
||||
email_changed = new_profile["email"] != profile["email"]
|
||||
if not username_changed and not email_changed:
|
||||
return None
|
||||
try:
|
||||
client.logout()
|
||||
except AccountNotAuthorized:
|
||||
pass
|
||||
if email_changed:
|
||||
return click.secho(
|
||||
"Please check your mail to verify your new email address and re-login. ",
|
||||
fg="yellow",
|
||||
)
|
||||
return click.secho("Please re-login.", fg="yellow")
|
||||
|
||||
|
||||
@cli.command("destroy", short_help="Destroy account")
|
||||
def account_destroy():
|
||||
client = AccountClient()
|
||||
click.confirm(
|
||||
"Are you sure you want to delete the %s user account?\n"
|
||||
"Warning! All linked data will be permanently removed and can not be restored."
|
||||
% client.get_account_info().get("profile").get("username"),
|
||||
abort=True,
|
||||
)
|
||||
client.destroy_account()
|
||||
try:
|
||||
client.logout()
|
||||
except AccountNotAuthorized:
|
||||
pass
|
||||
return click.secho(
|
||||
"User account has been destroyed.",
|
||||
fg="green",
|
||||
)
|
||||
|
||||
|
||||
@cli.command("show", short_help="PlatformIO Account information")
|
||||
@click.option("--offline", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_show(offline, json_output):
|
||||
client = AccountClient()
|
||||
info = client.get_account_info(offline)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(info))
|
||||
click.echo()
|
||||
if info.get("profile"):
|
||||
print_profile(info["profile"])
|
||||
if info.get("packages"):
|
||||
print_packages(info["packages"])
|
||||
if info.get("subscriptions"):
|
||||
print_subscriptions(info["subscriptions"])
|
||||
return click.echo()
|
||||
|
||||
|
||||
def print_profile(profile):
|
||||
click.secho("Profile", fg="cyan", bold=True)
|
||||
click.echo("=" * len("Profile"))
|
||||
data = []
|
||||
if profile.get("username"):
|
||||
data.append(("Username:", profile["username"]))
|
||||
if profile.get("email"):
|
||||
data.append(("Email:", profile["email"]))
|
||||
if profile.get("firstname"):
|
||||
data.append(("First name:", profile["firstname"]))
|
||||
if profile.get("lastname"):
|
||||
data.append(("Last name:", profile["lastname"]))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
|
||||
def print_packages(packages):
|
||||
click.echo()
|
||||
click.secho("Packages", fg="cyan")
|
||||
click.echo("=" * len("Packages"))
|
||||
for package in packages:
|
||||
click.echo()
|
||||
click.secho(package.get("name"), bold=True)
|
||||
click.echo("-" * len(package.get("name")))
|
||||
if package.get("description"):
|
||||
click.echo(package.get("description"))
|
||||
data = []
|
||||
expire = "-"
|
||||
if "subscription" in package:
|
||||
expire = datetime.datetime.strptime(
|
||||
(
|
||||
package["subscription"].get("end_at")
|
||||
or package["subscription"].get("next_bill_at")
|
||||
),
|
||||
"%Y-%m-%dT%H:%M:%SZ",
|
||||
).strftime("%Y-%m-%d")
|
||||
data.append(("Expire:", expire))
|
||||
services = []
|
||||
for key in package:
|
||||
if not key.startswith("service."):
|
||||
continue
|
||||
if isinstance(package[key], dict):
|
||||
services.append(package[key].get("title"))
|
||||
else:
|
||||
services.append(package[key])
|
||||
if services:
|
||||
data.append(("Services:", ", ".join(services)))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
|
||||
def print_subscriptions(subscriptions):
|
||||
click.echo()
|
||||
click.secho("Subscriptions", fg="cyan")
|
||||
click.echo("=" * len("Subscriptions"))
|
||||
for subscription in subscriptions:
|
||||
click.echo()
|
||||
click.secho(subscription.get("product_name"), bold=True)
|
||||
click.echo("-" * len(subscription.get("product_name")))
|
||||
data = [("State:", subscription.get("status"))]
|
||||
begin_at = datetime.datetime.strptime(
|
||||
subscription.get("begin_at"), "%Y-%m-%dT%H:%M:%SZ"
|
||||
).strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.append(("Start date:", begin_at or "-"))
|
||||
end_at = subscription.get("end_at")
|
||||
if end_at:
|
||||
end_at = datetime.datetime.strptime(
|
||||
subscription.get("end_at"), "%Y-%m-%dT%H:%M:%SZ"
|
||||
).strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.append(("End date:", end_at or "-"))
|
||||
next_bill_at = subscription.get("next_bill_at")
|
||||
if next_bill_at:
|
||||
next_bill_at = datetime.datetime.strptime(
|
||||
subscription.get("next_bill_at"), "%Y-%m-%dT%H:%M:%SZ"
|
||||
).strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.append(("Next payment:", next_bill_at or "-"))
|
||||
data.append(
|
||||
("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-")
|
||||
)
|
||||
data.append(
|
||||
("Cancel:", click.style(subscription.get("cancel_url"), fg="blue") or "-")
|
||||
)
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
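
The print_* helpers above only read a few keys from the account payload; a made-up example of the shape they expect (field names inferred from the lookups in the code):

# Made-up payload; only the keys actually read by print_profile/print_packages/
# print_subscriptions are shown.
info = {
    "profile": {"username": "pio-user", "email": "user@example.com"},
    "packages": [
        {
            "name": "tool-example",
            "description": "Example package",
            "subscription": {"end_at": "2030-01-01T00:00:00Z"},
            "service.pio-remote": {"title": "PIO Remote"},
        }
    ],
    "subscriptions": [
        {
            "product_name": "Example Plan",
            "status": "active",
            "begin_at": "2029-01-01T00:00:00Z",
            "update_url": "https://example.com/update",
            "cancel_url": "https://example.com/cancel",
        }
    ],
}

print_profile(info["profile"])
print_packages(info["packages"])
print_subscriptions(info["subscriptions"])
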
@@ -13,16 +13,16 @@
# limitations under the License.

import json
import shutil

import click
from tabulate import tabulate

from platformio import fs
from platformio.compat import dump_json_to_unicode
from platformio.package.manager.platform import PlatformPackageManager


@click.command("boards", short_help="Embedded board explorer")
@click.command("boards", short_help="Board Explorer")
@click.argument("query", required=False)
@click.option("--installed", is_flag=True)
@click.option("--json-output", is_flag=True)
@@ -41,8 +41,8 @@ def cli(query, installed, json_output): # pylint: disable=R0912
            grpboards[board["platform"]] = []
        grpboards[board["platform"]].append(board)

    terminal_width, _ = click.get_terminal_size()
    for (platform, boards) in sorted(grpboards.items()):
    terminal_width = shutil.get_terminal_size().columns
    for platform, boards in sorted(grpboards.items()):
        click.echo("")
        click.echo("Platform: ", nl=False)
        click.secho(platform, bold=True)
@@ -83,4 +83,4 @@ def _print_boards_json(query, installed=False):
            if query.lower() not in search_data.lower():
                continue
        result.append(board)
    click.echo(dump_json_to_unicode(result))
    click.echo(json.dumps(result))
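
The only behavioural change in this hunk besides JSON serialization is the terminal-size lookup: newer Click releases drop `click.get_terminal_size()`, and the standard library call used by the new code returns the same information. A short sketch:

import shutil

size = shutil.get_terminal_size()   # honours COLUMNS/LINES, falls back to (80, 24)
terminal_width = size.columns
print(terminal_width, size.lines)
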
@@ -12,19 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from os import getenv, makedirs, remove
from os.path import basename, isdir, isfile, join, realpath
from shutil import copyfile, copytree
from tempfile import mkdtemp
import glob
import os
import shutil
import tempfile

import click

from platformio import app, compat, fs
from platformio.commands.project import project_init as cmd_project_init
from platformio.commands.project import validate_boards
from platformio.commands.run.command import cli as cmd_run
from platformio import fs
from platformio.exception import CIBuildEnvsEmpty
from platformio.project.commands.init import project_init_cmd, validate_boards
from platformio.project.config import ProjectConfig
from platformio.run.cli import cli as cmd_run


def validate_path(ctx, param, value): # pylint: disable=unused-argument
@@ -33,36 +32,35 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
    for i, p in enumerate(value):
        if p.startswith("~"):
            value[i] = fs.expanduser(p)
        value[i] = realpath(value[i])
        if not compat.glob_recursive(value[i]):
        value[i] = os.path.abspath(value[i])
        if not glob.glob(value[i], recursive=True):
            invalid_path = p
            break
    try:
        assert invalid_path is None
        return value
    except AssertionError:
        raise click.BadParameter("Found invalid path: %s" % invalid_path)
    except AssertionError as exc:
        raise click.BadParameter("Found invalid path: %s" % invalid_path) from exc


@click.command("ci", short_help="Continuous integration")
@click.command("ci", short_help="Continuous Integration")
@click.argument("src", nargs=-1, callback=validate_path)
@click.option("-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
@click.option("--exclude", multiple=True)
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
@click.option(
    "--build-dir",
    default=mkdtemp,
    type=click.Path(file_okay=False, dir_okay=True, writable=True, resolve_path=True),
    default=tempfile.mkdtemp,
    type=click.Path(file_okay=False, dir_okay=True, writable=True),
)
@click.option("--keep-build-dir", is_flag=True)
@click.option(
    "-c",
    "--project-conf",
    type=click.Path(
        exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
    ),
    type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
)
@click.option("-O", "--project-option", multiple=True)
@click.option("-e", "--environment", "environments", multiple=True)
@click.option("-v", "--verbose", is_flag=True)
@click.pass_context
def cli( # pylint: disable=too-many-arguments, too-many-branches
@@ -75,31 +73,29 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
    keep_build_dir,
    project_conf,
    project_option,
    environments,
    verbose,
):

    if not src and getenv("PLATFORMIO_CI_SRC"):
        src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
    if not src and os.getenv("PLATFORMIO_CI_SRC"):
        src = validate_path(ctx, None, os.getenv("PLATFORMIO_CI_SRC").split(":"))
    if not src:
        raise click.BadParameter("Missing argument 'src'")

    try:
        app.set_session_var("force_option", True)

        if not keep_build_dir and isdir(build_dir):
        if not keep_build_dir and os.path.isdir(build_dir):
            fs.rmtree(build_dir)
        if not isdir(build_dir):
            makedirs(build_dir)
        if not os.path.isdir(build_dir):
            os.makedirs(build_dir)

        for dir_name, patterns in dict(lib=lib, src=src).items():
            if not patterns:
                continue
            contents = []
            for p in patterns:
                contents += compat.glob_recursive(p)
            _copy_contents(join(build_dir, dir_name), contents)
                contents += glob.glob(p, recursive=True)
            _copy_contents(os.path.join(build_dir, dir_name), contents)

        if project_conf and isfile(project_conf):
        if project_conf and os.path.isfile(project_conf):
            _copy_project_conf(build_dir, project_conf)
        elif not board:
            raise CIBuildEnvsEmpty()
@@ -109,65 +105,70 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches

        # initialise project
        ctx.invoke(
            cmd_project_init,
            project_init_cmd,
            project_dir=build_dir,
            board=board,
            project_option=project_option,
            boards=board,
            project_options=project_option,
        )

        # process project
        ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
        ctx.invoke(
            cmd_run, project_dir=build_dir, environment=environments, verbose=verbose
        )
    finally:
        if not keep_build_dir:
            fs.rmtree(build_dir)


def _copy_contents(dst_dir, contents):
def _copy_contents(dst_dir, contents): # pylint: disable=too-many-branches
    items = {"dirs": set(), "files": set()}

    for path in contents:
        if isdir(path):
        if os.path.isdir(path):
            items["dirs"].add(path)
        elif isfile(path):
        elif os.path.isfile(path):
            items["files"].add(path)

    dst_dir_name = basename(dst_dir)
    dst_dir_name = os.path.basename(dst_dir)

    if dst_dir_name == "src" and len(items["dirs"]) == 1:
        copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
        if not os.path.isdir(dst_dir):
            shutil.copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
    else:
        if not isdir(dst_dir):
            makedirs(dst_dir)
        if not os.path.isdir(dst_dir):
            os.makedirs(dst_dir)
        for d in items["dirs"]:
            copytree(d, join(dst_dir, basename(d)), symlinks=True)
            src_dst_dir = os.path.join(dst_dir, os.path.basename(d))
            if not os.path.isdir(src_dst_dir):
                shutil.copytree(d, src_dst_dir, symlinks=True)

    if not items["files"]:
        return

    if dst_dir_name == "lib":
        dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
        dst_dir = os.path.join(dst_dir, tempfile.mkdtemp(dir=dst_dir))

    for f in items["files"]:
        dst_file = join(dst_dir, basename(f))
        dst_file = os.path.join(dst_dir, os.path.basename(f))
        if f == dst_file:
            continue
        copyfile(f, dst_file)
        shutil.copyfile(f, dst_file)


def _exclude_contents(dst_dir, patterns):
    contents = []
    for p in patterns:
        contents += compat.glob_recursive(join(compat.glob_escape(dst_dir), p))
        contents += glob.glob(os.path.join(glob.escape(dst_dir), p), recursive=True)
    for path in contents:
        path = realpath(path)
        if isdir(path):
        path = os.path.abspath(path)
        if os.path.isdir(path):
            fs.rmtree(path)
        elif isfile(path):
            remove(path)
        elif os.path.isfile(path):
            os.remove(path)


def _copy_project_conf(build_dir, project_conf):
    config = ProjectConfig(project_conf, parse_extra=False)
    if config.has_section("platformio"):
        config.remove_section("platformio")
    config.save(join(build_dir, "platformio.ini"))
    config.save(os.path.join(build_dir, "platformio.ini"))
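
Most of the churn in this hunk replaces PlatformIO's compat helpers with the standard library: `compat.glob_recursive`/`compat.glob_escape` become `glob.glob(..., recursive=True)` and `glob.escape`, and bare `os`/`os.path`/`shutil`/`tempfile` calls replace the `from os.path import ...` style. A small sketch of the stdlib calls the new code relies on (paths are invented):

import glob
import os

# recursive=True lets "**" cross directory boundaries, like the old glob_recursive helper.
sources = glob.glob(os.path.join("src", "**", "*.cpp"), recursive=True)

# glob.escape() neutralises *, ? and [ in a literal directory name before a
# user-supplied exclude pattern is appended to it.
pattern = os.path.join(glob.escape("build [test]"), "*.o")
print(sources, glob.glob(pattern, recursive=True))
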
@ -1,175 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments, too-many-statements
|
||||
# pylint: disable=too-many-locals, too-many-branches
|
||||
|
||||
import os
|
||||
import signal
|
||||
from os.path import isfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, exception, fs, proc
|
||||
from platformio.commands.debug import helpers
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.platform import platform_install as cmd_platform_install
|
||||
from platformio.package.manager.core import inject_contrib_pysite
|
||||
from platformio.platform.exception import UnknownPlatform
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.exception import ProjectEnvsNotAvailableError
|
||||
from platformio.project.helpers import is_platformio_project, load_project_ide_data
|
||||
|
||||
|
||||
@click.command(
|
||||
"debug",
|
||||
context_settings=dict(ignore_unknown_options=True),
|
||||
short_help="Unified debugger",
|
||||
)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, writable=True, resolve_path=True
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True, file_okay=True, dir_okay=False, readable=True, resolve_path=True
|
||||
),
|
||||
)
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
@click.pass_context
|
||||
def cli(ctx, project_dir, project_conf, environment, verbose, interface, __unprocessed):
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
# use env variables from Eclipse or CLion
|
||||
for sysenv in ("CWD", "PWD", "PLATFORMIO_PROJECT_DIR"):
|
||||
if is_platformio_project(project_dir):
|
||||
break
|
||||
if os.getenv(sysenv):
|
||||
project_dir = os.getenv(sysenv)
|
||||
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig.get_instance(project_conf)
|
||||
config.validate(envs=[environment] if environment else None)
|
||||
|
||||
env_name = environment or helpers.get_default_debug_env(config)
|
||||
env_options = config.items(env=env_name, as_dict=True)
|
||||
if not set(env_options.keys()) >= set(["platform", "board"]):
|
||||
raise ProjectEnvsNotAvailableError()
|
||||
|
||||
try:
|
||||
platform = PlatformFactory.new(env_options["platform"])
|
||||
except UnknownPlatform:
|
||||
ctx.invoke(
|
||||
cmd_platform_install,
|
||||
platforms=[env_options["platform"]],
|
||||
skip_default_package=True,
|
||||
)
|
||||
platform = PlatformFactory.new(env_options["platform"])
|
||||
|
||||
debug_options = helpers.configure_initial_debug_options(platform, env_options)
|
||||
assert debug_options
|
||||
|
||||
if not interface:
|
||||
return helpers.predebug_project(ctx, project_dir, env_name, False, verbose)
|
||||
|
||||
ide_data = load_project_ide_data(project_dir, env_name)
|
||||
if not ide_data:
|
||||
raise DebugInvalidOptionsError("Could not load a build configuration")
|
||||
|
||||
if "--version" in __unprocessed:
|
||||
result = proc.exec_command([ide_data["gdb_path"], "--version"])
|
||||
if result["returncode"] == 0:
|
||||
return click.echo(result["out"])
|
||||
raise exception.PlatformioException("\n".join([result["out"], result["err"]]))
|
||||
|
||||
try:
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream("~", str(e) + "\n")
|
||||
if helpers.is_gdbmi_mode()
|
||||
else str(e) + "\n",
|
||||
nl=False,
|
||||
)
|
||||
|
||||
try:
|
||||
debug_options = platform.configure_debug_options(debug_options, ide_data)
|
||||
except NotImplementedError:
|
||||
# legacy for ESP32 dev-platform <=2.0.0
|
||||
debug_options["load_cmds"] = helpers.configure_esp32_load_cmds(
|
||||
debug_options, ide_data
|
||||
)
|
||||
|
||||
rebuild_prog = False
|
||||
preload = debug_options["load_cmds"] == ["preload"]
|
||||
load_mode = debug_options["load_mode"]
|
||||
if load_mode == "always":
|
||||
rebuild_prog = preload or not helpers.has_debug_symbols(ide_data["prog_path"])
|
||||
elif load_mode == "modified":
|
||||
rebuild_prog = helpers.is_prog_obsolete(
|
||||
ide_data["prog_path"]
|
||||
) or not helpers.has_debug_symbols(ide_data["prog_path"])
|
||||
else:
|
||||
rebuild_prog = not isfile(ide_data["prog_path"])
|
||||
|
||||
if preload or (not rebuild_prog and load_mode != "always"):
|
||||
# don't load firmware through debug server
|
||||
debug_options["load_cmds"] = []
|
||||
|
||||
if rebuild_prog:
|
||||
if helpers.is_gdbmi_mode():
|
||||
click.echo(
|
||||
helpers.escape_gdbmi_stream(
|
||||
"~", "Preparing firmware for debugging...\n"
|
||||
),
|
||||
nl=False,
|
||||
)
|
||||
stream = helpers.GDBMIConsoleStream()
|
||||
with proc.capture_std_streams(stream):
|
||||
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
|
||||
stream.close()
|
||||
else:
|
||||
click.echo("Preparing firmware for debugging...")
|
||||
helpers.predebug_project(ctx, project_dir, env_name, preload, verbose)
|
||||
|
||||
# save SHA sum of newly created prog
|
||||
if load_mode == "modified":
|
||||
helpers.is_prog_obsolete(ide_data["prog_path"])
|
||||
|
||||
if not isfile(ide_data["prog_path"]):
|
||||
raise DebugInvalidOptionsError("Program/firmware is missed")
|
||||
|
||||
# run debugging client
|
||||
inject_contrib_pysite()
|
||||
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from platformio.commands.debug.process.client import GDBClient, reactor
|
||||
|
||||
client = GDBClient(project_dir, __unprocessed, debug_options, env_options)
|
||||
client.spawn(ide_data["gdb_path"], ide_data["prog_path"])
|
||||
|
||||
signal.signal(signal.SIGINT, lambda *args, **kwargs: None)
|
||||
reactor.run()
|
||||
|
||||
return True
|
@ -1,302 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
from fnmatch import fnmatch
|
||||
from hashlib import sha1
|
||||
from io import BytesIO
|
||||
from os.path import isfile
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.commands import PlatformioCLI
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.run.command import cli as cmd_run
|
||||
from platformio.compat import is_bytes
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.options import ProjectOptions
|
||||
|
||||
|
||||
class GDBMIConsoleStream(BytesIO): # pylint: disable=too-few-public-methods
|
||||
|
||||
STDOUT = sys.stdout
|
||||
|
||||
def write(self, text):
|
||||
self.STDOUT.write(escape_gdbmi_stream("~", text))
|
||||
self.STDOUT.flush()
|
||||
|
||||
|
||||
def is_gdbmi_mode():
|
||||
return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
|
||||
|
||||
|
||||
def escape_gdbmi_stream(prefix, stream):
|
||||
bytes_stream = False
|
||||
if is_bytes(stream):
|
||||
bytes_stream = True
|
||||
stream = stream.decode()
|
||||
|
||||
if not stream:
|
||||
return b"" if bytes_stream else ""
|
||||
|
||||
ends_nl = stream.endswith("\n")
|
||||
stream = re.sub(r"\\+", "\\\\\\\\", stream)
|
||||
stream = stream.replace('"', '\\"')
|
||||
stream = stream.replace("\n", "\\n")
|
||||
stream = '%s"%s"' % (prefix, stream)
|
||||
if ends_nl:
|
||||
stream += "\n"
|
||||
|
||||
return stream.encode() if bytes_stream else stream
|
||||
|
||||
|
||||
def get_default_debug_env(config):
|
||||
default_envs = config.default_envs()
|
||||
all_envs = config.envs()
|
||||
for env in default_envs:
|
||||
if config.get("env:" + env, "build_type") == "debug":
|
||||
return env
|
||||
for env in all_envs:
|
||||
if config.get("env:" + env, "build_type") == "debug":
|
||||
return env
|
||||
return default_envs[0] if default_envs else all_envs[0]
|
||||
|
||||
|
||||
def predebug_project(ctx, project_dir, env_name, preload, verbose):
|
||||
ctx.invoke(
|
||||
cmd_run,
|
||||
project_dir=project_dir,
|
||||
environment=[env_name],
|
||||
target=["debug"] + (["upload"] if preload else []),
|
||||
verbose=verbose,
|
||||
)
|
||||
if preload:
|
||||
time.sleep(5)
|
||||
|
||||
|
||||
def configure_initial_debug_options(platform, env_options):
|
||||
def _cleanup_cmds(items):
|
||||
items = ProjectConfig.parse_multi_values(items)
|
||||
return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]
|
||||
|
||||
board_config = platform.board_config(env_options["board"])
|
||||
tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
|
||||
tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
|
||||
server_options = None
|
||||
|
||||
# specific server per a system
|
||||
if isinstance(tool_settings.get("server", {}), list):
|
||||
for item in tool_settings["server"][:]:
|
||||
tool_settings["server"] = item
|
||||
if util.get_systype() in item.get("system", []):
|
||||
break
|
||||
|
||||
# user overwrites debug server
|
||||
if env_options.get("debug_server"):
|
||||
server_options = {
|
||||
"cwd": None,
|
||||
"executable": None,
|
||||
"arguments": env_options.get("debug_server"),
|
||||
}
|
||||
server_options["executable"] = server_options["arguments"][0]
|
||||
server_options["arguments"] = server_options["arguments"][1:]
|
||||
elif "server" in tool_settings:
|
||||
server_options = tool_settings["server"]
|
||||
server_package = server_options.get("package")
|
||||
server_package_dir = (
|
||||
platform.get_package_dir(server_package) if server_package else None
|
||||
)
|
||||
if server_package and not server_package_dir:
|
||||
platform.install_packages(
|
||||
with_packages=[server_package], skip_default_package=True, silent=True
|
||||
)
|
||||
server_package_dir = platform.get_package_dir(server_package)
|
||||
server_options.update(
|
||||
dict(
|
||||
cwd=server_package_dir if server_package else None,
|
||||
executable=server_options.get("executable"),
|
||||
arguments=[
|
||||
a.replace("$PACKAGE_DIR", server_package_dir)
|
||||
if server_package_dir
|
||||
else a
|
||||
for a in server_options.get("arguments", [])
|
||||
],
|
||||
)
|
||||
)
|
||||
|
||||
extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
|
||||
extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
|
||||
result = dict(
|
||||
tool=tool_name,
|
||||
upload_protocol=env_options.get(
|
||||
"upload_protocol", board_config.get("upload", {}).get("protocol")
|
||||
),
|
||||
load_cmds=_cleanup_cmds(
|
||||
env_options.get(
|
||||
"debug_load_cmds",
|
||||
tool_settings.get(
|
||||
"load_cmds",
|
||||
tool_settings.get(
|
||||
"load_cmd", ProjectOptions["env.debug_load_cmds"].default
|
||||
),
|
||||
),
|
||||
)
|
||||
),
|
||||
load_mode=env_options.get(
|
||||
"debug_load_mode",
|
||||
tool_settings.get(
|
||||
"load_mode", ProjectOptions["env.debug_load_mode"].default
|
||||
),
|
||||
),
|
||||
init_break=env_options.get(
|
||||
"debug_init_break",
|
||||
tool_settings.get(
|
||||
"init_break", ProjectOptions["env.debug_init_break"].default
|
||||
),
|
||||
),
|
||||
init_cmds=_cleanup_cmds(
|
||||
env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
|
||||
),
|
||||
extra_cmds=extra_cmds,
|
||||
require_debug_port=tool_settings.get("require_debug_port", False),
|
||||
port=reveal_debug_port(
|
||||
env_options.get("debug_port", tool_settings.get("port")),
|
||||
tool_name,
|
||||
tool_settings,
|
||||
),
|
||||
server=server_options,
|
||||
)
|
||||
return result
|
||||
|
||||
|
||||
def configure_esp32_load_cmds(debug_options, configuration):
|
||||
"""
|
||||
DEPRECATED: Moved to ESP32 dev-platform
|
||||
See platform.py::configure_debug_options
|
||||
"""
|
||||
flash_images = configuration.get("extra", {}).get("flash_images")
|
||||
ignore_conds = [
|
||||
debug_options["load_cmds"] != ["load"],
|
||||
"xtensa-esp32" not in configuration.get("cc_path", ""),
|
||||
not flash_images,
|
||||
not all([isfile(item["path"]) for item in flash_images]),
|
||||
]
|
||||
if any(ignore_conds):
|
||||
return debug_options["load_cmds"]
|
||||
|
||||
mon_cmds = [
|
||||
'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
|
||||
path=fs.to_unix_path(item["path"]), offset=item["offset"]
|
||||
)
|
||||
for item in flash_images
|
||||
]
|
||||
mon_cmds.append(
|
||||
'monitor program_esp32 "{%s.bin}" 0x10000 verify'
|
||||
% fs.to_unix_path(configuration["prog_path"][:-4])
|
||||
)
|
||||
return mon_cmds
|
||||
|
||||
|
||||
def has_debug_symbols(prog_path):
|
||||
if not isfile(prog_path):
|
||||
return False
|
||||
matched = {
|
||||
b".debug_info": False,
|
||||
b".debug_abbrev": False,
|
||||
b" -Og": False,
|
||||
b" -g": False,
|
||||
b"__PLATFORMIO_BUILD_DEBUG__": False,
|
||||
}
|
||||
with open(prog_path, "rb") as fp:
|
||||
last_data = b""
|
||||
while True:
|
||||
data = fp.read(1024)
|
||||
if not data:
|
||||
break
|
||||
for pattern, found in matched.items():
|
||||
if found:
|
||||
continue
|
||||
if pattern in last_data + data:
|
||||
matched[pattern] = True
|
||||
last_data = data
|
||||
return all(matched.values())
|
||||
|
||||
|
||||
def is_prog_obsolete(prog_path):
|
||||
prog_hash_path = prog_path + ".sha1"
|
||||
if not isfile(prog_path):
|
||||
return True
|
||||
shasum = sha1()
|
||||
with open(prog_path, "rb") as fp:
|
||||
while True:
|
||||
data = fp.read(1024)
|
||||
if not data:
|
||||
break
|
||||
shasum.update(data)
|
||||
new_digest = shasum.hexdigest()
|
||||
old_digest = None
|
||||
if isfile(prog_hash_path):
|
||||
with open(prog_hash_path) as fp:
|
||||
old_digest = fp.read()
|
||||
if new_digest == old_digest:
|
||||
return False
|
||||
with open(prog_hash_path, "w") as fp:
|
||||
fp.write(new_digest)
|
||||
return True
|
||||
|
||||
|
||||
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
|
||||
def _get_pattern():
|
||||
if not env_debug_port:
|
||||
return None
|
||||
if set(["*", "?", "[", "]"]) & set(env_debug_port):
|
||||
return env_debug_port
|
||||
return None
|
||||
|
||||
def _is_match_pattern(port):
|
||||
pattern = _get_pattern()
|
||||
if not pattern:
|
||||
return True
|
||||
return fnmatch(port, pattern)
|
||||
|
||||
def _look_for_serial_port(hwids):
|
||||
for item in util.get_serialports(filter_hwid=True):
|
||||
if not _is_match_pattern(item["port"]):
|
||||
continue
|
||||
port = item["port"]
|
||||
if tool_name.startswith("blackmagic"):
|
||||
if (
|
||||
"windows" in util.get_systype()
|
||||
and port.startswith("COM")
|
||||
and len(port) > 4
|
||||
):
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item["description"]:
|
||||
return port
|
||||
for hwid in hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item["hwid"]:
|
||||
return port
|
||||
return None
|
||||
|
||||
if env_debug_port and not _get_pattern():
|
||||
return env_debug_port
|
||||
if not tool_settings.get("require_debug_port"):
|
||||
return None
|
||||
|
||||
debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
|
||||
if not debug_port:
|
||||
raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
|
||||
return debug_port
|
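
For reference, the GDB/MI escaping implemented by `escape_gdbmi_stream` above wraps console output in a prefixed, quoted record; a quick sketch of the transformation with an invented string:

# Invented sample; mirrors the escaping rules in escape_gdbmi_stream().
text = 'Loading "firmware.elf"\n'
print(escape_gdbmi_stream("~", text))
# -> ~"Loading \"firmware.elf\"\n"
# Quotes and newlines inside the record are escaped; the trailing newline that
# terminates the record is kept as a real newline.
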
@ -1,161 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
GDB_DEFAULT_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset halt
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor init
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_STUTIL_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset
|
||||
monitor halt
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_JLINK_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor reset
|
||||
monitor halt
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor clrbp
|
||||
monitor reset
|
||||
monitor go
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor clrbp
|
||||
monitor speed auto
|
||||
pio_reset_halt_target
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_BLACKMAGIC_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
set language c
|
||||
set *0xE000ED0C = 0x05FA0004
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
while ($busy)
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
end
|
||||
set language auto
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
pio_reset_halt_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor swdp_scan
|
||||
attach 1
|
||||
set mem inaccessible-by-default off
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
|
||||
set language c
|
||||
set *0xE000ED0C = 0x05FA0004
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
while ($busy)
|
||||
set $busy = (*0xE000ED0C & 0x4)
|
||||
end
|
||||
set language auto
|
||||
"""
|
||||
|
||||
GDB_MSPDEBUG_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
monitor erase
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_QEMU_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor system_reset
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
monitor system_reset
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$LOAD_CMDS
|
||||
pio_reset_halt_target
|
||||
$INIT_BREAK
|
||||
"""
|
||||
|
||||
GDB_RENODE_INIT_CONFIG = """
|
||||
define pio_reset_halt_target
|
||||
monitor machine Reset
|
||||
$LOAD_CMDS
|
||||
monitor start
|
||||
end
|
||||
|
||||
define pio_reset_run_target
|
||||
pio_reset_halt_target
|
||||
end
|
||||
|
||||
target extended-remote $DEBUG_PORT
|
||||
$LOAD_CMDS
|
||||
$INIT_BREAK
|
||||
monitor start
|
||||
"""
|
||||
|
||||
|
||||
TOOL_TO_CONFIG = {
|
||||
"jlink": GDB_JLINK_INIT_CONFIG,
|
||||
"mspdebug": GDB_MSPDEBUG_INIT_CONFIG,
|
||||
"qemu": GDB_QEMU_INIT_CONFIG,
|
||||
"blackmagic": GDB_BLACKMAGIC_INIT_CONFIG,
|
||||
"renode": GDB_RENODE_INIT_CONFIG,
|
||||
}
|
||||
|
||||
|
||||
def get_gdb_init_config(debug_options):
|
||||
tool = debug_options.get("tool")
|
||||
if tool and tool in TOOL_TO_CONFIG:
|
||||
return TOOL_TO_CONFIG[tool]
|
||||
server_exe = (debug_options.get("server") or {}).get("executable", "").lower()
|
||||
if "st-util" in server_exe:
|
||||
return GDB_STUTIL_INIT_CONFIG
|
||||
return GDB_DEFAULT_INIT_CONFIG
|
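
`get_gdb_init_config` above picks a template by debug tool first and falls back to the st-util variant only when the server executable hints at it; a minimal sketch of the lookup with invented option dicts:

# Invented option dicts; shows which template get_gdb_init_config() returns.
jlink = {"tool": "jlink", "server": None}
stutil = {"tool": "custom", "server": {"executable": "st-util"}}
fallback = {"tool": None, "server": None}

assert get_gdb_init_config(jlink) is GDB_JLINK_INIT_CONFIG
assert get_gdb_init_config(stutil) is GDB_STUTIL_INIT_CONFIG
assert get_gdb_init_config(fallback) is GDB_DEFAULT_INIT_CONFIG
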
@ -1,93 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import signal
|
||||
import time
|
||||
|
||||
import click
|
||||
from twisted.internet import protocol # pylint: disable=import-error
|
||||
|
||||
from platformio import fs
|
||||
from platformio.compat import string_types
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_core_dir
|
||||
|
||||
|
||||
class BaseProcess(protocol.ProcessProtocol, object):
|
||||
|
||||
STDOUT_CHUNK_SIZE = 2048
|
||||
LOG_FILE = None
|
||||
|
||||
COMMON_PATTERNS = {
|
||||
"PLATFORMIO_HOME_DIR": get_project_core_dir(),
|
||||
"PLATFORMIO_CORE_DIR": get_project_core_dir(),
|
||||
"PYTHONEXE": get_pythonexe_path(),
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
self._last_activity = 0
|
||||
|
||||
def apply_patterns(self, source, patterns=None):
|
||||
_patterns = self.COMMON_PATTERNS.copy()
|
||||
_patterns.update(patterns or {})
|
||||
|
||||
for key, value in _patterns.items():
|
||||
if key.endswith(("_DIR", "_PATH")):
|
||||
_patterns[key] = fs.to_unix_path(value)
|
||||
|
||||
def _replace(text):
|
||||
for key, value in _patterns.items():
|
||||
pattern = "$%s" % key
|
||||
text = text.replace(pattern, value or "")
|
||||
return text
|
||||
|
||||
if isinstance(source, string_types):
|
||||
source = _replace(source)
|
||||
elif isinstance(source, (list, dict)):
|
||||
items = enumerate(source) if isinstance(source, list) else source.items()
|
||||
for key, value in items:
|
||||
if isinstance(value, string_types):
|
||||
source[key] = _replace(value)
|
||||
elif isinstance(value, (list, dict)):
|
||||
source[key] = self.apply_patterns(value, patterns)
|
||||
|
||||
return source
|
||||
|
||||
def onStdInData(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
|
||||
def outReceived(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
while data:
|
||||
chunk = data[: self.STDOUT_CHUNK_SIZE]
|
||||
click.echo(chunk, nl=False)
|
||||
data = data[self.STDOUT_CHUNK_SIZE :]
|
||||
|
||||
def errReceived(self, data):
|
||||
self._last_activity = time.time()
|
||||
if self.LOG_FILE:
|
||||
with open(self.LOG_FILE, "ab") as fp:
|
||||
fp.write(data)
|
||||
click.echo(data, nl=False, err=True)
|
||||
|
||||
def processEnded(self, _):
|
||||
self._last_activity = time.time()
|
||||
# Allow terminating via SIGINT/CTRL+C
|
||||
signal.signal(signal.SIGINT, signal.default_int_handler)
|
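
`apply_patterns` above performs plain `$NAME` substitution over strings, lists and nested dicts; a small sketch with made-up paths:

# Made-up values; demonstrates the $PATTERN substitution done by apply_patterns().
args = BaseProcess().apply_patterns(
    ["--file", "$PROG_PATH", {"cwd": "$PROG_DIR"}],
    patterns={"PROG_PATH": "/tmp/firmware.elf", "PROG_DIR": "/tmp"},
)
print(args)  # ['--file', '/tmp/firmware.elf', {'cwd': '/tmp'}]
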
@ -1,280 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import re
|
||||
import signal
|
||||
import time
|
||||
from hashlib import sha1
|
||||
from os.path import basename, dirname, isdir, join, realpath, splitext
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from twisted.internet import defer # pylint: disable=import-error
|
||||
from twisted.internet import protocol # pylint: disable=import-error
|
||||
from twisted.internet import reactor # pylint: disable=import-error
|
||||
from twisted.internet import stdio # pylint: disable=import-error
|
||||
from twisted.internet import task # pylint: disable=import-error
|
||||
|
||||
from platformio import fs, proc, telemetry, util
|
||||
from platformio.cache import ContentCache
|
||||
from platformio.commands.debug import helpers
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.debug.initcfgs import get_gdb_init_config
|
||||
from platformio.commands.debug.process.base import BaseProcess
|
||||
from platformio.commands.debug.process.server import DebugServer
|
||||
from platformio.compat import hashlib_encode_data, is_bytes
|
||||
from platformio.project.helpers import get_project_cache_dir
|
||||
|
||||
|
||||
class GDBClient(BaseProcess): # pylint: disable=too-many-instance-attributes
|
||||
|
||||
PIO_SRC_NAME = ".pioinit"
|
||||
INIT_COMPLETED_BANNER = "PlatformIO: Initialization completed"
|
||||
|
||||
def __init__(self, project_dir, args, debug_options, env_options):
|
||||
super(GDBClient, self).__init__()
|
||||
self.project_dir = project_dir
|
||||
self.args = list(args)
|
||||
self.debug_options = debug_options
|
||||
self.env_options = env_options
|
||||
|
||||
self._debug_server = DebugServer(debug_options, env_options)
|
||||
self._session_id = None
|
||||
|
||||
if not isdir(get_project_cache_dir()):
|
||||
os.makedirs(get_project_cache_dir())
|
||||
self._gdbsrc_dir = mkdtemp(dir=get_project_cache_dir(), prefix=".piodebug-")
|
||||
|
||||
self._target_is_run = False
|
||||
self._auto_continue_timer = None
|
||||
self._errors_buffer = b""
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def spawn(self, gdb_path, prog_path):
|
||||
session_hash = gdb_path + prog_path
|
||||
self._session_id = sha1(hashlib_encode_data(session_hash)).hexdigest()
|
||||
self._kill_previous_session()
|
||||
|
||||
patterns = {
|
||||
"PROJECT_DIR": self.project_dir,
|
||||
"PROG_PATH": prog_path,
|
||||
"PROG_DIR": dirname(prog_path),
|
||||
"PROG_NAME": basename(splitext(prog_path)[0]),
|
||||
"DEBUG_PORT": self.debug_options["port"],
|
||||
"UPLOAD_PROTOCOL": self.debug_options["upload_protocol"],
|
||||
"INIT_BREAK": self.debug_options["init_break"] or "",
|
||||
"LOAD_CMDS": "\n".join(self.debug_options["load_cmds"] or []),
|
||||
}
|
||||
|
||||
yield self._debug_server.spawn(patterns)
|
||||
if not patterns["DEBUG_PORT"]:
|
||||
patterns["DEBUG_PORT"] = self._debug_server.get_debug_port()
|
||||
|
||||
self.generate_pioinit(self._gdbsrc_dir, patterns)
|
||||
|
||||
# start GDB client
|
||||
args = [
|
||||
"piogdb",
|
||||
"-q",
|
||||
"--directory",
|
||||
self._gdbsrc_dir,
|
||||
"--directory",
|
||||
self.project_dir,
|
||||
"-l",
|
||||
"10",
|
||||
]
|
||||
args.extend(self.args)
|
||||
if not gdb_path:
|
||||
raise DebugInvalidOptionsError("GDB client is not configured")
|
||||
gdb_data_dir = self._get_data_dir(gdb_path)
|
||||
if gdb_data_dir:
|
||||
args.extend(["--data-directory", gdb_data_dir])
|
||||
args.append(patterns["PROG_PATH"])
|
||||
|
||||
transport = reactor.spawnProcess(
|
||||
self, gdb_path, args, path=self.project_dir, env=os.environ
|
||||
)
|
||||
defer.returnValue(transport)
|
||||
|
||||
@staticmethod
|
||||
def _get_data_dir(gdb_path):
|
||||
if "msp430" in gdb_path:
|
||||
return None
|
||||
gdb_data_dir = realpath(join(dirname(gdb_path), "..", "share", "gdb"))
|
||||
return gdb_data_dir if isdir(gdb_data_dir) else None
|
||||
|
||||
def generate_pioinit(self, dst_dir, patterns):
|
||||
# default GDB init commands depending on debug tool
|
||||
commands = get_gdb_init_config(self.debug_options).split("\n")
|
||||
|
||||
if self.debug_options["init_cmds"]:
|
||||
commands = self.debug_options["init_cmds"]
|
||||
commands.extend(self.debug_options["extra_cmds"])
|
||||
|
||||
if not any("define pio_reset_run_target" in cmd for cmd in commands):
|
||||
commands = [
|
||||
"define pio_reset_run_target",
|
||||
" echo Warning! Undefined pio_reset_run_target command\\n",
|
||||
" monitor reset",
|
||||
"end",
|
||||
] + commands
|
||||
if not any("define pio_reset_halt_target" in cmd for cmd in commands):
|
||||
commands = [
|
||||
"define pio_reset_halt_target",
|
||||
" echo Warning! Undefined pio_reset_halt_target command\\n",
|
||||
" monitor reset halt",
|
||||
"end",
|
||||
] + commands
|
||||
if not any("define pio_restart_target" in cmd for cmd in commands):
|
||||
commands += [
|
||||
"define pio_restart_target",
|
||||
" pio_reset_halt_target",
|
||||
" $INIT_BREAK",
|
||||
" %s" % ("continue" if patterns["INIT_BREAK"] else "next"),
|
||||
"end",
|
||||
]
|
||||
|
||||
banner = [
|
||||
"echo PlatformIO Unified Debugger -> http://bit.ly/pio-debug\\n",
|
||||
"echo PlatformIO: debug_tool = %s\\n" % self.debug_options["tool"],
|
||||
"echo PlatformIO: Initializing remote target...\\n",
|
||||
]
|
||||
footer = ["echo %s\\n" % self.INIT_COMPLETED_BANNER]
|
||||
commands = banner + commands + footer
|
||||
|
||||
with open(join(dst_dir, self.PIO_SRC_NAME), "w") as fp:
|
||||
fp.write("\n".join(self.apply_patterns(commands, patterns)))
|
||||
|
||||
def connectionMade(self):
|
||||
self._lock_session(self.transport.pid)
|
||||
|
||||
p = protocol.Protocol()
|
||||
p.dataReceived = self.onStdInData
|
||||
stdio.StandardIO(p)
|
||||
|
||||
def onStdInData(self, data):
|
||||
super(GDBClient, self).onStdInData(data)
|
||||
if b"-exec-run" in data:
|
||||
if self._target_is_run:
|
||||
token, _ = data.split(b"-", 1)
|
||||
self.outReceived(token + b"^running\n")
|
||||
return
|
||||
data = data.replace(b"-exec-run", b"-exec-continue")
|
||||
|
||||
if b"-exec-continue" in data:
|
||||
self._target_is_run = True
|
||||
if b"-gdb-exit" in data or data.strip() in (b"q", b"quit"):
|
||||
# Allow terminating via SIGINT/CTRL+C
|
||||
signal.signal(signal.SIGINT, signal.default_int_handler)
|
||||
self.transport.write(b"pio_reset_run_target\n")
|
||||
self.transport.write(data)
|
||||
|
||||
def processEnded(self, reason): # pylint: disable=unused-argument
|
||||
self._unlock_session()
|
||||
if self._gdbsrc_dir and isdir(self._gdbsrc_dir):
|
||||
fs.rmtree(self._gdbsrc_dir)
|
||||
if self._debug_server:
|
||||
self._debug_server.terminate()
|
||||
|
||||
reactor.stop()
|
||||
|
||||
def outReceived(self, data):
|
||||
super(GDBClient, self).outReceived(data)
|
||||
self._handle_error(data)
|
||||
# go to init break automatically
|
||||
if self.INIT_COMPLETED_BANNER.encode() in data:
|
||||
telemetry.send_event(
|
||||
"Debug", "Started", telemetry.dump_run_environment(self.env_options)
|
||||
)
|
||||
self._auto_continue_timer = task.LoopingCall(self._auto_exec_continue)
|
||||
self._auto_continue_timer.start(0.1)
|
||||
|
||||
def errReceived(self, data):
|
||||
super(GDBClient, self).errReceived(data)
|
||||
self._handle_error(data)
|
||||
|
||||
def console_log(self, msg):
|
||||
if helpers.is_gdbmi_mode():
|
||||
msg = helpers.escape_gdbmi_stream("~", msg)
|
||||
self.outReceived(msg if is_bytes(msg) else msg.encode())
|
||||
|
||||
def _auto_exec_continue(self):
|
||||
auto_exec_delay = 0.5 # in seconds
|
||||
if self._last_activity > (time.time() - auto_exec_delay):
|
||||
return
|
||||
if self._auto_continue_timer:
|
||||
self._auto_continue_timer.stop()
|
||||
self._auto_continue_timer = None
|
||||
|
||||
if not self.debug_options["init_break"] or self._target_is_run:
|
||||
return
|
||||
self.console_log(
|
||||
"PlatformIO: Resume the execution to `debug_init_break = %s`\n"
|
||||
% self.debug_options["init_break"]
|
||||
)
|
||||
self.console_log(
|
||||
"PlatformIO: More configuration options -> http://bit.ly/pio-debug\n"
|
||||
)
|
||||
self.transport.write(
|
||||
b"0-exec-continue\n" if helpers.is_gdbmi_mode() else b"continue\n"
|
||||
)
|
||||
self._target_is_run = True
|
||||
|
||||
def _handle_error(self, data):
|
||||
self._errors_buffer = (self._errors_buffer + data)[-8192:] # keep last 8 KBytes
|
||||
if not (
|
||||
self.PIO_SRC_NAME.encode() in self._errors_buffer
|
||||
and b"Error in sourced" in self._errors_buffer
|
||||
):
|
||||
return
|
||||
|
||||
last_erros = self._errors_buffer.decode()
|
||||
last_erros = " ".join(reversed(last_erros.split("\n")))
|
||||
last_erros = re.sub(r'((~|&)"|\\n\"|\\t)', " ", last_erros, flags=re.M)
|
||||
|
||||
err = "%s -> %s" % (
|
||||
telemetry.dump_run_environment(self.env_options),
|
||||
last_erros,
|
||||
)
|
||||
telemetry.send_exception("DebugInitError: %s" % err)
|
||||
self.transport.loseConnection()
|
||||
|
||||
def _kill_previous_session(self):
|
||||
assert self._session_id
|
||||
pid = None
|
||||
with ContentCache() as cc:
|
||||
pid = cc.get(self._session_id)
|
||||
cc.delete(self._session_id)
|
||||
if not pid:
|
||||
return
|
||||
if "windows" in util.get_systype():
|
||||
kill = ["Taskkill", "/PID", pid, "/F"]
|
||||
else:
|
||||
kill = ["kill", pid]
|
||||
try:
|
||||
proc.exec_command(kill)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
def _lock_session(self, pid):
|
||||
if not self._session_id:
|
||||
return
|
||||
with ContentCache() as cc:
|
||||
cc.set(self._session_id, str(pid), "1h")
|
||||
|
||||
def _unlock_session(self):
|
||||
if not self._session_id:
|
||||
return
|
||||
with ContentCache() as cc:
|
||||
cc.delete(self._session_id)
|
@ -1,166 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import time
|
||||
from os.path import isdir, isfile, join
|
||||
|
||||
from twisted.internet import defer # pylint: disable=import-error
|
||||
from twisted.internet import reactor # pylint: disable=import-error
|
||||
|
||||
from platformio import fs, util
|
||||
from platformio.commands.debug.exception import DebugInvalidOptionsError
|
||||
from platformio.commands.debug.helpers import escape_gdbmi_stream, is_gdbmi_mode
|
||||
from platformio.commands.debug.process.base import BaseProcess
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
|
||||
class DebugServer(BaseProcess):
|
||||
def __init__(self, debug_options, env_options):
|
||||
super(DebugServer, self).__init__()
|
||||
self.debug_options = debug_options
|
||||
self.env_options = env_options
|
||||
|
||||
self._debug_port = ":3333"
|
||||
self._transport = None
|
||||
self._process_ended = False
|
||||
self._ready = False
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def spawn(self, patterns): # pylint: disable=too-many-branches
|
||||
systype = util.get_systype()
|
||||
server = self.debug_options.get("server")
|
||||
if not server:
|
||||
defer.returnValue(None)
|
||||
server = self.apply_patterns(server, patterns)
|
||||
server_executable = server["executable"]
|
||||
if not server_executable:
|
||||
defer.returnValue(None)
|
||||
if server["cwd"]:
|
||||
server_executable = join(server["cwd"], server_executable)
|
||||
if (
|
||||
"windows" in systype
|
||||
and not server_executable.endswith(".exe")
|
||||
and isfile(server_executable + ".exe")
|
||||
):
|
||||
server_executable = server_executable + ".exe"
|
||||
|
||||
if not isfile(server_executable):
|
||||
server_executable = where_is_program(server_executable)
|
||||
if not isfile(server_executable):
|
||||
raise DebugInvalidOptionsError(
|
||||
"\nCould not launch Debug Server '%s'. Please check that it "
|
||||
"is installed and is included in a system PATH\n\n"
|
||||
"See documentation or contact contact@platformio.org:\n"
|
||||
"https://docs.platformio.org/page/plus/debugging.html\n"
|
||||
% server_executable
|
||||
)
|
||||
|
||||
openocd_pipe_allowed = all(
|
||||
[not self.debug_options["port"], "openocd" in server_executable]
|
||||
)
|
||||
if openocd_pipe_allowed:
|
||||
args = []
|
||||
if server["cwd"]:
|
||||
args.extend(["-s", server["cwd"]])
|
||||
args.extend(
|
||||
["-c", "gdb_port pipe; tcl_port disabled; telnet_port disabled"]
|
||||
)
|
||||
args.extend(server["arguments"])
|
||||
str_args = " ".join(
|
||||
[arg if arg.startswith("-") else '"%s"' % arg for arg in args]
|
||||
)
|
||||
self._debug_port = '| "%s" %s' % (server_executable, str_args)
|
||||
self._debug_port = fs.to_unix_path(self._debug_port)
|
||||
defer.returnValue(self._debug_port)
|
||||
|
||||
env = os.environ.copy()
|
||||
# prepend server "lib" folder to LD path
|
||||
if (
|
||||
"windows" not in systype
|
||||
and server["cwd"]
|
||||
and isdir(join(server["cwd"], "lib"))
|
||||
):
|
||||
ld_key = "DYLD_LIBRARY_PATH" if "darwin" in systype else "LD_LIBRARY_PATH"
|
||||
env[ld_key] = join(server["cwd"], "lib")
|
||||
if os.environ.get(ld_key):
|
||||
env[ld_key] = "%s:%s" % (env[ld_key], os.environ.get(ld_key))
|
||||
# prepend BIN to PATH
|
||||
if server["cwd"] and isdir(join(server["cwd"], "bin")):
|
||||
env["PATH"] = "%s%s%s" % (
|
||||
join(server["cwd"], "bin"),
|
||||
os.pathsep,
|
||||
os.environ.get("PATH", os.environ.get("Path", "")),
|
||||
)
|
||||
|
||||
self._transport = reactor.spawnProcess(
|
||||
self,
|
||||
server_executable,
|
||||
[server_executable] + server["arguments"],
|
||||
path=server["cwd"],
|
||||
env=env,
|
||||
)
|
||||
if "mspdebug" in server_executable.lower():
|
||||
self._debug_port = ":2000"
|
||||
elif "jlink" in server_executable.lower():
|
||||
self._debug_port = ":2331"
|
||||
elif "qemu" in server_executable.lower():
|
||||
self._debug_port = ":1234"
|
||||
|
||||
yield self._wait_until_ready()
|
||||
|
||||
defer.returnValue(self._debug_port)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _wait_until_ready(self):
|
||||
timeout = 10
|
||||
elapsed = 0
|
||||
delay = 0.5
|
||||
auto_ready_delay = 0.5
|
||||
while not self._ready and not self._process_ended and elapsed < timeout:
|
||||
yield self.async_sleep(delay)
|
||||
if not self.debug_options.get("server", {}).get("ready_pattern"):
|
||||
self._ready = self._last_activity < (time.time() - auto_ready_delay)
|
||||
elapsed += delay
|
||||
|
||||
@staticmethod
|
||||
def async_sleep(secs):
|
||||
d = defer.Deferred()
|
||||
reactor.callLater(secs, d.callback, None)
|
||||
return d
|
||||
|
||||
def get_debug_port(self):
|
||||
return self._debug_port
|
||||
|
||||
def outReceived(self, data):
|
||||
super(DebugServer, self).outReceived(
|
||||
escape_gdbmi_stream("@", data) if is_gdbmi_mode() else data
|
||||
)
|
||||
if self._ready:
|
||||
return
|
||||
ready_pattern = self.debug_options.get("server", {}).get("ready_pattern")
|
||||
if ready_pattern:
|
||||
self._ready = ready_pattern.encode() in data
|
||||
|
||||
def processEnded(self, reason):
|
||||
self._process_ended = True
|
||||
super(DebugServer, self).processEnded(reason)
|
||||
|
||||
def terminate(self):
|
||||
if self._process_ended or not self._transport:
|
||||
return
|
||||
try:
|
||||
self._transport.signalProcess("KILL")
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
@@ -12,4 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.commands.device.filters.base import DeviceMonitorFilter
# pylint: disable=unused-import
from platformio.device.monitor.filters.base import (
    DeviceMonitorFilterBase as DeviceMonitorFilter,
)
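The hunk above swaps the old filter base import for an alias to the relocated class, so code that keeps importing the old DeviceMonitorFilter name receives the new DeviceMonitorFilterBase. A minimal sketch of what the aliased import resolves to (module paths as shown in the hunk; not verified beyond that):

# The alias from the hunk above: the old name now points at the relocated class.
from platformio.device.monitor.filters.base import (
    DeviceMonitorFilterBase as DeviceMonitorFilter,
)

print(DeviceMonitorFilter.__name__)  # "DeviceMonitorFilterBase"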
@@ -1,245 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
from fnmatch import fnmatch

import click
from serial.tools import miniterm

from platformio import exception, fs, util
from platformio.commands.device import helpers as device_helpers
from platformio.compat import dump_json_to_unicode
from platformio.platform.factory import PlatformFactory
from platformio.project.exception import NotPlatformIOProjectError


@click.group(short_help="Device manager & serial/socket monitor")
def cli():
    pass


@cli.command("list", short_help="List devices")
@click.option("--serial", is_flag=True, help="List serial ports, default")
@click.option("--logical", is_flag=True, help="List logical devices")
@click.option("--mdns", is_flag=True, help="List multicast DNS services")
@click.option("--json-output", is_flag=True)
def device_list(  # pylint: disable=too-many-branches
    serial, logical, mdns, json_output
):
    if not logical and not mdns:
        serial = True
    data = {}
    if serial:
        data["serial"] = util.get_serial_ports()
    if logical:
        data["logical"] = util.get_logical_devices()
    if mdns:
        data["mdns"] = util.get_mdns_services()

    single_key = list(data)[0] if len(list(data)) == 1 else None

    if json_output:
        return click.echo(
            dump_json_to_unicode(data[single_key] if single_key else data)
        )

    titles = {
        "serial": "Serial Ports",
        "logical": "Logical Devices",
        "mdns": "Multicast DNS Services",
    }

    for key, value in data.items():
        if not single_key:
            click.secho(titles[key], bold=True)
            click.echo("=" * len(titles[key]))

        if key == "serial":
            for item in value:
                click.secho(item["port"], fg="cyan")
                click.echo("-" * len(item["port"]))
                click.echo("Hardware ID: %s" % item["hwid"])
                click.echo("Description: %s" % item["description"])
                click.echo("")

        if key == "logical":
            for item in value:
                click.secho(item["path"], fg="cyan")
                click.echo("-" * len(item["path"]))
                click.echo("Name: %s" % item["name"])
                click.echo("")

        if key == "mdns":
            for item in value:
                click.secho(item["name"], fg="cyan")
                click.echo("-" * len(item["name"]))
                click.echo("Type: %s" % item["type"])
                click.echo("IP: %s" % item["ip"])
                click.echo("Port: %s" % item["port"])
                if item["properties"]:
                    click.echo(
                        "Properties: %s"
                        % (
                            "; ".join(
                                [
                                    "%s=%s" % (k, v)
                                    for k, v in item["properties"].items()
                                ]
                            )
                        )
                    )
                click.echo("")

        if single_key:
            click.echo("")

    return True
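The legacy device_list command removed above prints per-category sections, or raw JSON with --json-output; when only one category is requested, the JSON payload is the bare list rather than a keyed dict. A hedged usage sketch via Click's test runner (the module path is an assumption, and it requires a PlatformIO version that still ships this file):

# Hedged sketch: exercising the removed "device list" command through Click's
# test runner. The import path is an assumption about where this file lived.
import json

from click.testing import CliRunner

from platformio.commands.device.command import cli as device_cli  # assumed path

result = CliRunner().invoke(device_cli, ["list", "--json-output"])
ports = json.loads(result.output)  # bare list: only the serial category was requested
for item in ports:
    print(item["port"], "-", item["description"])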
@cli.command("monitor", short_help="Monitor device (Serial)")
|
||||
@click.option("--port", "-p", help="Port, a number or a device name")
|
||||
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
|
||||
@click.option(
|
||||
"--parity",
|
||||
default="N",
|
||||
type=click.Choice(["N", "E", "O", "S", "M"]),
|
||||
help="Set parity, default=N",
|
||||
)
|
||||
@click.option("--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
|
||||
@click.option(
|
||||
"--xonxoff", is_flag=True, help="Enable software flow control, default=Off"
|
||||
)
|
||||
@click.option(
|
||||
"--rts", default=None, type=click.IntRange(0, 1), help="Set initial RTS line state"
|
||||
)
|
||||
@click.option(
|
||||
"--dtr", default=None, type=click.IntRange(0, 1), help="Set initial DTR line state"
|
||||
)
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
|
||||
@click.option(
|
||||
"--encoding",
|
||||
default="UTF-8",
|
||||
help="Set the encoding for the serial port (e.g. hexlify, "
|
||||
"Latin1, UTF-8), default: UTF-8",
|
||||
)
|
||||
@click.option("--filter", "-f", multiple=True, help="Add filters/text transformations")
|
||||
@click.option(
|
||||
"--eol",
|
||||
default="CRLF",
|
||||
type=click.Choice(["CR", "LF", "CRLF"]),
|
||||
help="End of line mode, default=CRLF",
|
||||
)
|
||||
@click.option("--raw", is_flag=True, help="Do not apply any encodings/transformations")
|
||||
@click.option(
|
||||
"--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application, default=3 (Ctrl+C)",
|
||||
)
|
||||
@click.option(
|
||||
"--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control miniterm (menu), default=20 (DEC)",
|
||||
)
|
||||
@click.option(
|
||||
"--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off",
|
||||
)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, resolve_path=True),
|
||||
)
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
help="Load configuration from `platformio.ini` and specified environment",
|
||||
)
|
||||
def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
# load default monitor filters
|
||||
filters_dir = os.path.join(fs.get_source_dir(), "commands", "device", "filters")
|
||||
for name in os.listdir(filters_dir):
|
||||
if not name.endswith(".py"):
|
||||
continue
|
||||
device_helpers.load_monitor_filter(os.path.join(filters_dir, name))
|
||||
|
||||
project_options = {}
|
||||
try:
|
||||
with fs.cd(kwargs["project_dir"]):
|
||||
project_options = device_helpers.get_project_options(kwargs["environment"])
|
||||
kwargs = device_helpers.apply_project_monitor_options(kwargs, project_options)
|
||||
except NotPlatformIOProjectError:
|
||||
pass
|
||||
|
||||
platform = None
|
||||
if "platform" in project_options:
|
||||
with fs.cd(kwargs["project_dir"]):
|
||||
platform = PlatformFactory.new(project_options["platform"])
|
||||
device_helpers.register_platform_filters(
|
||||
platform, kwargs["project_dir"], kwargs["environment"]
|
||||
)
|
||||
|
||||
if not kwargs["port"]:
|
||||
ports = util.get_serial_ports(filter_hwid=True)
|
||||
if len(ports) == 1:
|
||||
kwargs["port"] = ports[0]["port"]
|
||||
elif "platform" in project_options and "board" in project_options:
|
||||
board_hwids = device_helpers.get_board_hwids(
|
||||
kwargs["project_dir"],
|
||||
platform,
|
||||
project_options["board"],
|
||||
)
|
||||
for item in ports:
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item["hwid"]:
|
||||
kwargs["port"] = item["port"]
|
||||
break
|
||||
if kwargs["port"]:
|
||||
break
|
||||
elif kwargs["port"] and (set(["*", "?", "[", "]"]) & set(kwargs["port"])):
|
||||
for item in util.get_serial_ports():
|
||||
if fnmatch(item["port"], kwargs["port"]):
|
||||
kwargs["port"] = item["port"]
|
||||
break
|
||||
|
||||
# override system argv with patched options
|
||||
sys.argv = ["monitor"] + device_helpers.options_to_argv(
|
||||
kwargs,
|
||||
project_options,
|
||||
ignore=("port", "baud", "rts", "dtr", "environment", "project_dir"),
|
||||
)
|
||||
|
||||
if not kwargs["quiet"]:
|
||||
click.echo(
|
||||
"--- Available filters and text transformations: %s"
|
||||
% ", ".join(sorted(miniterm.TRANSFORMATIONS.keys()))
|
||||
)
|
||||
click.echo("--- More details at http://bit.ly/pio-monitor-filters")
|
||||
try:
|
||||
miniterm.main(
|
||||
default_port=kwargs["port"],
|
||||
default_baudrate=kwargs["baud"] or 9600,
|
||||
default_rts=kwargs["rts"],
|
||||
default_dtr=kwargs["dtr"],
|
||||
)
|
||||
except Exception as e:
|
||||
raise exception.MinitermException(e)
|
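Port auto-detection in the removed device_monitor works in three tiers: a single HWID-filtered port is taken as-is, otherwise the board's build.hwids are matched against the detected ports, and a --port value containing glob characters is resolved with fnmatch. A small sketch of the glob case with made-up port names:

# Hedged sketch of the fnmatch-based --port glob handling above
# (port names are illustrative).
from fnmatch import fnmatch

requested = "/dev/ttyUSB*"  # e.g. `pio device monitor --port "/dev/ttyUSB*"`
available = ["/dev/ttyACM0", "/dev/ttyUSB0", "/dev/ttyUSB1"]

port = next((p for p in available if fnmatch(p, requested)), None)
print(port)  # "/dev/ttyUSB0" - the first match wins, as in the loop above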
@@ -1,42 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from serial.tools import miniterm

from platformio.project.config import ProjectConfig


class DeviceMonitorFilter(miniterm.Transform):
    def __init__(self, project_dir=None, environment=None):
        """ Called by PlatformIO to pass context """
        miniterm.Transform.__init__(self)

        self.project_dir = project_dir
        self.environment = environment

        self.config = ProjectConfig.get_instance()
        if not self.environment:
            default_envs = self.config.default_envs()
            if default_envs:
                self.environment = default_envs[0]
            elif self.config.envs():
                self.environment = self.config.envs()[0]

    def __call__(self):
        """ Called by the miniterm library when the filter is actually used """
        return self

    @property
    def NAME(self):
        raise NotImplementedError("Please declare NAME attribute for the filter class")
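Concrete filters subclass this base class, declare NAME (the key under which they are registered in miniterm.TRANSFORMATIONS) and override the rx/tx hooks inherited from miniterm.Transform. A hedged sketch of such a filter (the filter name and behavior are made up; the import path follows the helpers module shown below):

# Hedged sketch of a custom monitor filter built on the base class above.
from platformio.commands.device import DeviceMonitorFilter  # assumed re-export


class Shouty(DeviceMonitorFilter):
    NAME = "shouty"  # key used when registering in miniterm.TRANSFORMATIONS

    def rx(self, text):
        # transform device -> terminal traffic
        return text.upper()

    def tx(self, text):
        # terminal -> device traffic is passed through unchanged
        return text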
@@ -1,106 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import inspect
import os

from serial.tools import miniterm

from platformio import fs
from platformio.commands.device import DeviceMonitorFilter
from platformio.compat import get_object_members, load_python_module
from platformio.project.config import ProjectConfig


def apply_project_monitor_options(cli_options, project_options):
    for k in ("port", "speed", "rts", "dtr"):
        k2 = "monitor_%s" % k
        if k == "speed":
            k = "baud"
        if cli_options[k] is None and k2 in project_options:
            cli_options[k] = project_options[k2]
            if k != "port":
                cli_options[k] = int(cli_options[k])
    return cli_options


def options_to_argv(cli_options, project_options, ignore=None):
    confmon_flags = project_options.get("monitor_flags", [])
    result = confmon_flags[::]

    for f in project_options.get("monitor_filters", []):
        result.extend(["--filter", f])

    for k, v in cli_options.items():
        if v is None or (ignore and k in ignore):
            continue
        k = "--" + k.replace("_", "-")
        if k in confmon_flags:
            continue
        if isinstance(v, bool):
            if v:
                result.append(k)
        elif isinstance(v, tuple):
            for i in v:
                result.extend([k, i])
        else:
            result.extend([k, str(v)])
    return result
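apply_project_monitor_options maps monitor_* project options onto CLI options that were left unset (monitor_speed feeds --baud), and options_to_argv then rebuilds a miniterm-style argv from the merged options: booleans become bare flags, tuples repeat the flag, everything else is stringified. A hedged walk-through with made-up inputs (import path taken from the command module above; requires a version that still ships it):

# Hedged walk-through of options_to_argv() with illustrative values.
from platformio.commands.device import helpers as device_helpers

cli_options = {
    "baud": 115200,
    "echo": True,
    "raw": False,
    "filter": ("time", "log2file"),
    "port": "/dev/ttyUSB0",  # skipped via `ignore` below
}
project_options = {"monitor_flags": ["--eol", "LF"], "monitor_filters": ["colorize"]}

argv = device_helpers.options_to_argv(cli_options, project_options, ignore=("port",))
# Expected, following the rules above:
# ['--eol', 'LF', '--filter', 'colorize', '--baud', '115200',
#  '--echo', '--filter', 'time', '--filter', 'log2file']
print(argv)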
def get_project_options(environment=None):
    config = ProjectConfig.get_instance()
    config.validate(envs=[environment] if environment else None)
    if not environment:
        default_envs = config.default_envs()
        if default_envs:
            environment = default_envs[0]
        else:
            environment = config.envs()[0]
    return config.items(env=environment, as_dict=True)


def get_board_hwids(project_dir, platform, board):
    with fs.cd(project_dir):
        return platform.board_config(board).get("build.hwids", [])


def load_monitor_filter(path, project_dir=None, environment=None):
    name = os.path.basename(path)
    name = name[: name.find(".")]
    module = load_python_module("platformio.commands.device.filters.%s" % name, path)
    for cls in get_object_members(module).values():
        if (
            not inspect.isclass(cls)
            or not issubclass(cls, DeviceMonitorFilter)
            or cls == DeviceMonitorFilter
        ):
            continue
        obj = cls(project_dir, environment)
        miniterm.TRANSFORMATIONS[obj.NAME] = obj
    return True


def register_platform_filters(platform, project_dir, environment):
    monitor_dir = os.path.join(platform.get_dir(), "monitor")
    if not os.path.isdir(monitor_dir):
        return

    for name in os.listdir(monitor_dir):
        if not name.startswith("filter_") or not name.endswith(".py"):
            continue
        path = os.path.join(monitor_dir, name)
        if not os.path.isfile(path):
            continue
        load_monitor_filter(path, project_dir, environment)
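get_board_hwids feeds the HWID matching performed by the monitor command: each build.hwids pair from the board manifest is normalized to VID:PID form and searched for inside the detected port's hwid string. A hedged sketch of that match with illustrative values:

# Hedged sketch of the HWID matching driven by get_board_hwids() above
# (the hwids and port data are illustrative examples).
board_hwids = [["0x2341", "0x0043"], ["0x2A03", "0x0043"]]
port = {"port": "/dev/ttyACM0", "hwid": "USB VID:PID=2341:0043 SER=8543931313"}

for vid, pid in board_hwids:
    hwid_str = ("%s:%s" % (vid, pid)).replace("0x", "")
    if hwid_str in port["hwid"]:
        print("matched", port["port"])
        break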
@@ -1,152 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=too-many-locals,too-many-statements

import mimetypes
import socket
from os.path import isdir

import click

from platformio import exception
from platformio.compat import WINDOWS
from platformio.package.manager.core import get_core_package_dir, inject_contrib_pysite


@click.command("home", short_help="UI to manage PlatformIO")
@click.option("--port", type=int, default=8008, help="HTTP port, default=8008")
@click.option(
    "--host",
    default="127.0.0.1",
    help=(
        "HTTP host, default=127.0.0.1. You can open PIO Home for inbound "
        "connections with --host=0.0.0.0"
    ),
)
@click.option("--no-open", is_flag=True)
@click.option(
    "--shutdown-timeout",
    default=0,
    type=int,
    help=(
        "Automatically shutdown server on timeout (in seconds) when no clients "
        "are connected. Default is 0 which means never auto shutdown"
    ),
)
def cli(port, host, no_open, shutdown_timeout):
    # pylint: disable=import-error, import-outside-toplevel

    # import contrib modules
    inject_contrib_pysite()

    from autobahn.twisted.resource import WebSocketResource
    from twisted.internet import reactor
    from twisted.web import server
    from twisted.internet.error import CannotListenError

    from platformio.commands.home.rpc.handlers.app import AppRPC
    from platformio.commands.home.rpc.handlers.ide import IDERPC
    from platformio.commands.home.rpc.handlers.misc import MiscRPC
    from platformio.commands.home.rpc.handlers.os import OSRPC
    from platformio.commands.home.rpc.handlers.piocore import PIOCoreRPC
    from platformio.commands.home.rpc.handlers.project import ProjectRPC
    from platformio.commands.home.rpc.handlers.account import AccountRPC
    from platformio.commands.home.rpc.server import JSONRPCServerFactory
    from platformio.commands.home.web import WebRoot

    factory = JSONRPCServerFactory(shutdown_timeout)
    factory.addHandler(AppRPC(), namespace="app")
    factory.addHandler(IDERPC(), namespace="ide")
    factory.addHandler(MiscRPC(), namespace="misc")
    factory.addHandler(OSRPC(), namespace="os")
    factory.addHandler(PIOCoreRPC(), namespace="core")
    factory.addHandler(ProjectRPC(), namespace="project")
    factory.addHandler(AccountRPC(), namespace="account")

    contrib_dir = get_core_package_dir("contrib-piohome")
    if not isdir(contrib_dir):
        raise exception.PlatformioException("Invalid path to PIO Home Contrib")

    # Ensure PIO Home mimetypes are known
    mimetypes.add_type("text/html", ".html")
    mimetypes.add_type("text/css", ".css")
    mimetypes.add_type("application/javascript", ".js")

    root = WebRoot(contrib_dir)
    root.putChild(b"wsrpc", WebSocketResource(factory))
    site = server.Site(root)

    # hook for `platformio-node-helpers`
    if host == "__do_not_start__":
        return

    already_started = is_port_used(host, port)
    home_url = "http://%s:%d" % (host, port)
    if not no_open:
        if already_started:
            click.launch(home_url)
        else:
            reactor.callLater(1, lambda: click.launch(home_url))

    click.echo(
        "\n".join(
            [
                "",
                "  ___I_",
                " /\\-_--\\   PlatformIO Home",
                "/  \\_-__\\",
                "|[]| [] |  %s" % home_url,
                "|__|____|______________%s" % ("_" * len(host)),
            ]
        )
    )
    click.echo("")
    click.echo("Open PlatformIO Home in your browser by this URL => %s" % home_url)

    try:
        reactor.listenTCP(port, site, interface=host)
    except CannotListenError as e:
        click.secho(str(e), fg="red", err=True)
        already_started = True

    if already_started:
        click.secho(
            "PlatformIO Home server is already started in another process.", fg="yellow"
        )
        return

    click.echo("PIO Home has been started. Press Ctrl+C to shutdown.")

    reactor.run()


def is_port_used(host, port):
    socket.setdefaulttimeout(1)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if WINDOWS:
        try:
            s.bind((host, port))
            s.close()
            return False
        except (OSError, socket.error):
            pass
    else:
        try:
            s.connect((host, port))
            s.close()
        except socket.error:
            return False

    return True
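is_port_used probes differently per platform: on Windows it tries to bind (a bind failure means something already listens there), elsewhere it tries to connect (a successful connect means the port is in use). A hedged usage sketch, reusing the helper to avoid launching a second PIO Home (the import path is an assumption about where this removed file lived):

# Hedged usage sketch of is_port_used() above.
from platformio.commands.home.command import is_port_used  # assumed path

host, port = "127.0.0.1", 8008
if is_port_used(host, port):
    print("PIO Home already running at http://%s:%d" % (host, port))
else:
    print("Port %d is free - a new server could be started." % port)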
Some files were not shown because too many files have changed in this diff.