Mirror of https://github.com/platformio/platformio-core.git (synced 2025-06-25 17:41:54 +02:00)

Compare commits

2836 Commits
SHA1 | | |
---|---|---|---
444c57b4a6 | |||
d787648e71 | |||
846588deec | |||
79142965ce | |||
93bc4fae6c | |||
1aa256d63c | |||
3a133af1a6 | |||
f93d3d509b | |||
145142ea6c | |||
b4b02982d6 | |||
841489c154 | |||
23c142dffd | |||
fc946baa93 | |||
a447022e7f | |||
4c697d9032 | |||
a71443a2ee | |||
20e076191e | |||
d907ecb9e9 | |||
c950d6d366 | |||
29cd2d2bdb | |||
a584a6bce3 | |||
4dc7ea5bd0 | |||
1be6e10f99 | |||
c9016d6939 | |||
baab25a48c | |||
4d4f5a217b | |||
b6d1f4d769 | |||
90fc36cf2d | |||
9be0a8248d | |||
d15314689d | |||
1d4b5c8051 | |||
47a87c57f2 | |||
ec2d01f277 | |||
4e05309e02 | |||
1fd3a4061f | |||
014ac79c87 | |||
dd3fe909a1 | |||
c1afb364e9 | |||
f3c27eadf6 | |||
fe2fd5e880 | |||
07e7dc4717 | |||
a94e5bd5ab | |||
f5ab0e5ddd | |||
3e20abec90 | |||
a4276b4ea6 | |||
cade63fba5 | |||
3a57661230 | |||
33fadd028d | |||
647b131d9b | |||
b537004a75 | |||
67b2759be2 | |||
fe2e8a0a40 | |||
03e84fe325 | |||
b45cdc9cb6 | |||
3aed8e1259 | |||
2d4a87238a | |||
023b58e9f0 | |||
3211a2b91b | |||
4b61de0136 | |||
e6ae18ab0d | |||
4230b223d2 | |||
d224ae658d | |||
20dc006345 | |||
13035ced59 | |||
b9d27240b5 | |||
2441d47321 | |||
cf497e8829 | |||
013153718d | |||
f1726843a2 | |||
44ef6e3469 | |||
eeb5ac456e | |||
aea9075d4b | |||
11a8d9ff7a | |||
7b587ba8bf | |||
9eb6e5166d | |||
aa580360e8 | |||
4c490cc63c | |||
882d4da8cb | |||
781114f026 | |||
7cf8d1d696 | |||
fd1333f031 | |||
8e21259222 | |||
9899547b73 | |||
4075789a32 | |||
ff364610c5 | |||
e5940673d7 | |||
fe140b0566 | |||
2ec5a3154e | |||
956f21b639 | |||
cdac7d497c | |||
591b377e4a | |||
c475578db6 | |||
2bad42ecb1 | |||
0acfc25d56 | |||
9d1593da0b | |||
e9433de50f | |||
fcba901611 | |||
0e3249e8b1 | |||
0d647e164b | |||
c01ef88265 | |||
9fb9e586a0 | |||
28bd200cd6 | |||
56be27fb0b | |||
32991356f3 | |||
dbe58b49bf | |||
d36e39418e | |||
c28740cfb1 | |||
430acc87de | |||
c0d97287dd | |||
0f3dbe623d | |||
6449115635 | |||
d085a02068 | |||
76a11a75b7 | |||
93018930ab | |||
621b24b665 | |||
7606dd4faf | |||
aa06d21abe | |||
042f8dc668 | |||
c4f76848a7 | |||
e1ff9a469d | |||
2239616484 | |||
55be7181b3 | |||
f519a9d524 | |||
f4319f670c | |||
80fc335528 | |||
353f440335 | |||
3e9ca48588 | |||
255e91b51c | |||
adf94843ea | |||
e3e08d9691 | |||
84c7ede0e1 | |||
28c90652bc | |||
a75da327d0 | |||
adf4012b96 | |||
1fe806269d | |||
ffacd17387 | |||
4742ffc9d8 | |||
700c705317 | |||
17ba91977d | |||
f31f9fa616 | |||
485f801c74 | |||
adab425c6d | |||
aabbbef944 | |||
14ce28e028 | |||
ca1f633f9c | |||
a2f3e85760 | |||
f422b5e05c | |||
ba58db3079 | |||
4729d9f55d | |||
41bd751ec2 | |||
c74c9778a1 | |||
f2d16e7631 | |||
b181406a1f | |||
dc16f80ffc | |||
125be4bfd4 | |||
14907579cd | |||
b0a1f3ae16 | |||
195304bbea | |||
e4c4f2ac50 | |||
77e6d1b099 | |||
cf4da42b25 | |||
51bf17515e | |||
1e2c37c190 | |||
204a60dd52 | |||
0f554d2f31 | |||
f382aae66b | |||
998da59f7c | |||
4cad98601d | |||
34545d3f12 | |||
127b422d25 | |||
8c61f0f6b6 | |||
fb93c1937c | |||
827bd09c61 | |||
984d63983d | |||
11df021750 | |||
ac6d94860b | |||
b238c55e53 | |||
961ab6b35e | |||
e1f34c7ea0 | |||
f70e6d50c6 | |||
540465291a | |||
0b3c0144e6 | |||
7ab27ddf9d | |||
e78bf51f68 | |||
5f8c15b96a | |||
9c61ef544d | |||
5548197a74 | |||
2458309d55 | |||
7229e1cce4 | |||
3e95134721 | |||
687189a142 | |||
51b4cd88db | |||
fe52b79eb2 | |||
091c96eb07 | |||
f2eead6ece | |||
c2b3097618 | |||
2728c90441 | |||
5cac6d8b88 | |||
bd34c0f437 | |||
f1c445be15 | |||
b88c393b4e | |||
897844ebc1 | |||
00409fc096 | |||
b75bdbd320 | |||
a0f8def616 | |||
c946613019 | |||
2ee8214485 | |||
7e89e551ae | |||
6972c9c100 | |||
5cfaea91d6 | |||
ce735c0ae5 | |||
aa0df36c8a | |||
99224d7d4e | |||
532759c0c6 | |||
fb43d2508a | |||
07944a9d5b | |||
8b6a4b8ce8 | |||
6e75dc0d57 | |||
a733f3c868 | |||
65397fe059 | |||
48a823d39e | |||
f8b5266c1e | |||
9170eee6e4 | |||
89f4574680 | |||
831f7f52bc | |||
dccc14b507 | |||
3a21f48c9c | |||
54ff3a8d4e | |||
4474175e52 | |||
a983075dac | |||
3268b516a9 | |||
5c9b373b65 | |||
0fe6bf262e | |||
390755c499 | |||
deca77d1a3 | |||
bc2e51d51f | |||
bce70d4945 | |||
940fa327f5 | |||
db8f027f30 | |||
39b61d50e6 | |||
f85c3081fe | |||
2a1fd273ee | |||
a423a4dde4 | |||
abda3edad6 | |||
be4d016f61 | |||
68e62c7137 | |||
bf8f1e9efb | |||
a102fd2d48 | |||
ff221b103a | |||
646aa4f45b | |||
325d4c16b8 | |||
f47083b86b | |||
3d48f3ec04 | |||
837ea85c3c | |||
9585e2a3e3 | |||
5396882e75 | |||
109c537d86 | |||
b239628ac3 | |||
25c7c60f0d | |||
8a38442bba | |||
205b29560f | |||
bbcd92b7c6 | |||
3b3fbecbf3 | |||
a3e66d6325 | |||
355f57e888 | |||
6eff31b5d3 | |||
01423a7659 | |||
0f9a5f8eee | |||
1c419ef71a | |||
01ab1fa4c0 | |||
0ff46bdd88 | |||
dd033bf675 | |||
a28a3d31c9 | |||
450f48ba81 | |||
813861ddae | |||
939b9b9112 | |||
98edf7609f | |||
9f0efdeb5c | |||
3fd063d8ed | |||
1b55da0af2 | |||
4dc44868ea | |||
f720cd841c | |||
53f1d82890 | |||
e78efff33b | |||
a754a28cd8 | |||
1d97982230 | |||
e022b67161 | |||
82de26d401 | |||
31218060db | |||
e25b170b34 | |||
326ebcf593 | |||
8b604c1a03 | |||
f219f35ac8 | |||
4d89593b05 | |||
3881a8c677 | |||
e9cf551101 | |||
5ffa42a5a2 | |||
a5052433f2 | |||
425332040e | |||
3a230dfb51 | |||
292dc3fd71 | |||
e48dfbaadc | |||
c0d2abc9a7 | |||
d017a8197e | |||
378528abfc | |||
91487f179e | |||
363fee4ba0 | |||
41cc735979 | |||
c9235a5276 | |||
355f5afab9 | |||
2b36c7086a | |||
f819cbb4b8 | |||
e3c33596db | |||
1bcec6654d | |||
9df692529b | |||
141d6fc4a6 | |||
6bc915f7db | |||
4ae24a619f | |||
7f7bc76b20 | |||
55e7b36dc4 | |||
395a4053aa | |||
cb65bdf22f | |||
6ea7ded483 | |||
eeb0116f28 | |||
63ca19541f | |||
e0f839a372 | |||
4fc6b26db5 | |||
4388cd4321 | |||
71afa639e2 | |||
89ffd82275 | |||
148ce1a897 | |||
fc12dda765 | |||
58a59f8ae8 | |||
41fb1ca8bd | |||
bba5bc9d0f | |||
f8f3e9863e | |||
eb20f3410a | |||
681b90e6b2 | |||
c016d6827b | |||
f840577066 | |||
475c5d2a3c | |||
f9cbf6cb97 | |||
d728e0e873 | |||
7876626f04 | |||
f6aa95a4fe | |||
4596acab81 | |||
09f1269440 | |||
939f8e0812 | |||
1d0d89a4fa | |||
2908efd337 | |||
9dc6ed031f | |||
21c4f091e2 | |||
2c94fb2aad | |||
2312ca929d | |||
6f0b1fbb91 | |||
c8eea40dd0 | |||
53cd43b676 | |||
981266646c | |||
0acf968b2d | |||
1e5a728f3c | |||
c4d178e50e | |||
b991d9f25c | |||
82d380d895 | |||
9344f3cd81 | |||
6ee9cc04fb | |||
a0c959be28 | |||
df896ad401 | |||
743fc8e636 | |||
fa255ff8b3 | |||
97a7cdd2a2 | |||
02a63a6954 | |||
7f38e222c9 | |||
f71317dad9 | |||
4444a0db99 | |||
e8ffa244e5 | |||
7e9b637143 | |||
0d9ee75b05 | |||
66fe55668e | |||
5c3ae15bee | |||
58a1d5d96e | |||
ab15da4f4b | |||
71bb84f3f2 | |||
faff0fb56c | |||
0fb064eba3 | |||
bea5e87543 | |||
62e9589851 | |||
7d86eebe77 | |||
0bba598c61 | |||
4b446b0d72 | |||
f43f41cc53 | |||
00c5d30ce9 | |||
269d5e0a3e | |||
331ff2dc9c | |||
a24cf50413 | |||
8d33a3d151 | |||
d9ff250f82 | |||
f2d206ca54 | |||
d7c9dc2411 | |||
3e6725bb5f | |||
c3e287672e | |||
a387f9708a | |||
07bfa8ce4a | |||
3dfb936f3c | |||
e39438791c | |||
c7060f93e8 | |||
8794b2a3a1 | |||
cabe7d8c11 | |||
56cc7ce270 | |||
5b13aeda52 | |||
674d00183e | |||
598d2b24de | |||
4c4fb5029e | |||
2c4055d9e1 | |||
efbe3d4aa6 | |||
c785c8c6f3 | |||
9b4a045413 | |||
7c650c2c08 | |||
1422b77298 | |||
1af508272b | |||
5073313c33 | |||
18b6aad369 | |||
097de2be98 | |||
188c65ef7b | |||
b2a04f265e | |||
15d53c95c0 | |||
0d57a799b5 | |||
464b167e65 | |||
380652eb52 | |||
4350c4ca48 | |||
8835a03cd9 | |||
61ba8afee6 | |||
199e3d8958 | |||
9f09657997 | |||
2e64056787 | |||
83d2173748 | |||
1503eb5d41 | |||
6db3eb8e33 | |||
355222b0c0 | |||
2fbd766fd9 | |||
fb5e99473f | |||
de4ba4cbe1 | |||
42f1197de8 | |||
2337dbd2cd | |||
17360b0ed2 | |||
5ee79f1724 | |||
20067c5736 | |||
d43c5696cc | |||
8970f36f1a | |||
75716d26ff | |||
d0ca48661c | |||
4121882a9d | |||
9c38cf6621 | |||
6395a032e5 | |||
a0387bd16e | |||
4d4aec4f57 | |||
7bbfaab891 | |||
337e7fe43a | |||
38f03224d3 | |||
48655ad728 | |||
5de541e493 | |||
527d61296f | |||
2141a09736 | |||
190ebcccfe | |||
a1d9798594 | |||
bf3942e7cc | |||
5ec5660fa6 | |||
fb09077c38 | |||
ec5bf1b5e7 | |||
bf7fb15941 | |||
b35f1ea572 | |||
7e6cb84c87 | |||
bf769e1a9e | |||
476bf20923 | |||
3277ac3a18 | |||
93ce9b0c5e | |||
b11925a9ec | |||
7f8784e2a8 | |||
ca7a100392 | |||
32c2e33edf | |||
30fc00098d | |||
1dd62361c7 | |||
c870c09d67 | |||
30b00e7a9d | |||
c763f4b3a3 | |||
9800fb7b2c | |||
3b66f4270c | |||
dc14bd7362 | |||
4be5185ed3 | |||
1ea0adf6af | |||
7cb40ef3b0 | |||
044bf61a4d | |||
e0f9cb8c26 | |||
d6d1c6b327 | |||
4c177c1ad3 | |||
490af8ac37 | |||
ca48e6c172 | |||
7533c369d4 | |||
cd8024c762 | |||
0b4aedbeeb | |||
3d2ac4698c | |||
e0a3b81877 | |||
af21c50aec | |||
1cbc424488 | |||
887e542cb2 | |||
780c62d925 | |||
122ebed16d | |||
158aabbdf2 | |||
a8c3f2bdf6 | |||
8814f4e92d | |||
ba5f61f92b | |||
43dd429aa2 | |||
cd8179a41f | |||
10136729ab | |||
c5d7c4f88e | |||
e3796cfda1 | |||
847fdd4deb | |||
c56b35f504 | |||
bc9d9ac2db | |||
e2f0d96f09 | |||
4e78c3ec40 | |||
dfffd5e97b | |||
8c13d13f80 | |||
32d501bed1 | |||
17a7293967 | |||
59902abd09 | |||
a4756987a4 | |||
b04c1591c2 | |||
83c4e5f463 | |||
8ada1c2b34 | |||
b7b01dd6a0 | |||
5c2673cd71 | |||
09f7ff2db3 | |||
5e8eb77090 | |||
a0493e6ac4 | |||
4d755f2692 | |||
fcb676abc6 | |||
fa0de1dad4 | |||
6653c02487 | |||
0939b43899 | |||
537558d410 | |||
7c9e0393f8 | |||
9ddf73baa6 | |||
699da0a8fb | |||
fd8c9786c0 | |||
410324b2c7 | |||
36d470279c | |||
e498119e0d | |||
9ff117b0fb | |||
2695690b34 | |||
8dc20f93d5 | |||
f8d21e5b32 | |||
72ac6c86df | |||
8c2a7df53e | |||
d92e36efa0 | |||
741e9a40b3 | |||
7527143fff | |||
a23fef010f | |||
cd4f5541ac | |||
73089b3cb0 | |||
3a70c902a9 | |||
bedbae6311 | |||
842679c32b | |||
10ff4ae77a | |||
bc325ab2cc | |||
a31a7f2b06 | |||
4278574450 | |||
6f8f2511c2 | |||
5282124664 | |||
83bb6611b9 | |||
dcc02c3e14 | |||
f070399cad | |||
b9920b286f | |||
d278f8f215 | |||
3c5c65769c | |||
2f7362951c | |||
f4535190a3 | |||
236c4570cf | |||
5844c536a4 | |||
6627fd5790 | |||
25074d80d3 | |||
f032663b33 | |||
d24702eb29 | |||
9051677d74 | |||
7637286efa | |||
31a24e1652 | |||
c8c4028a23 | |||
0bd27a36e9 | |||
ddfe5a6c03 | |||
ee93ca1615 | |||
4c2aca4956 | |||
dd14b5e2ed | |||
6464420c1c | |||
79ec493c79 | |||
abb464707d | |||
7c846b8968 | |||
84c2e0a3d6 | |||
c2ddc89e46 | |||
1495e24e1e | |||
6e16b43568 | |||
6c18b37d54 | |||
6134db8e81 | |||
3cf62f8fa6 | |||
523b6dfa98 | |||
3928cb522e | |||
de856ee730 | |||
d3b7508bd5 | |||
6c71a3bea2 | |||
d2e27f5385 | |||
2a5de43964 | |||
029e66cd06 | |||
96fb8c74f9 | |||
b006f53010 | |||
19d518fc4c | |||
f01cd7570c | |||
ffebfd4376 | |||
e4264a6a51 | |||
d85bc0f7f8 | |||
1445a91fab | |||
3b878747f2 | |||
401f8a4891 | |||
6bec593b93 | |||
aef49a8bff | |||
772e25df49 | |||
3363b3a516 | |||
1f096fe03f | |||
32e440bec7 | |||
99b5204802 | |||
3c17b31d5e | |||
89a80f158e | |||
c42db2ec22 | |||
6a3b6f0d44 | |||
ca2622b7a6 | |||
b9a9fd4f43 | |||
1ea6d47110 | |||
256acf7e23 | |||
284ccc9e8a | |||
655eedd7b0 | |||
bb6490d6f2 | |||
300b7b2138 | |||
86c4bd69d2 | |||
dd63c8002a | |||
13fc8508b3 | |||
a76933990c | |||
7e3e394707 | |||
cee3f4d90f | |||
c557473cfb | |||
f893fcf135 | |||
092326cb91 | |||
92a5c1bac6 | |||
4b2f0eb1d5 | |||
9ae67fdad9 | |||
5142feba7a | |||
8cbe7bc7a6 | |||
d8f36b6534 | |||
58d533a3bb | |||
18e130fd12 | |||
b72c1636f7 | |||
f68c18d1e5 | |||
db6b8a6dbc | |||
5afa0a955e | |||
ca3b3717d3 | |||
d4784c05f5 | |||
7a01da7039 | |||
42690d3fa7 | |||
50cbc4d4e2 | |||
63c2278a83 | |||
4bccaae945 | |||
e12bc9fe5f | |||
ac63cf0240 | |||
30709fd0b3 | |||
6f9985125d | |||
743a3e2c02 | |||
bd21ff0d3e | |||
46858fff39 | |||
854c549e1c | |||
4b5bc91abb | |||
375c396b7b | |||
7aaa9c028b | |||
7f351bc7c8 | |||
c42fe32972 | |||
a6e61a7a5a | |||
4bc3e3cf95 | |||
4a7a8b8b68 | |||
51ab0bbd3c | |||
30937df4e6 | |||
b15a4e746a | |||
1b17234c41 | |||
26f897cb55 | |||
99d049a6dd | |||
f3c3402b35 | |||
55b9c446f1 | |||
e3ca0c6f04 | |||
4ff591bd7e | |||
b02335a294 | |||
cc3ea65faa | |||
206bb38f54 | |||
10da6bf5c6 | |||
22860cd4e5 | |||
0b8a595288 | |||
7e7856e44c | |||
b104b840c4 | |||
32386bec18 | |||
db366b3163 | |||
472c80159d | |||
4a95148cd0 | |||
11a43b2693 | |||
12fb02db6e | |||
52f8e98eed | |||
dcc63da2ef | |||
756bb07d1a | |||
d2be7033e9 | |||
27ccdc76a0 | |||
dcecd5f922 | |||
506a08c7cf | |||
e2892d5d4c | |||
0ce7885833 | |||
6b7e8ebe97 | |||
6e5aee5ef3 | |||
4aebf8c9d7 | |||
1f75430fab | |||
2564b9eb78 | |||
cf558036d0 | |||
b568eb68d6 | |||
19006378a8 | |||
a19c4dbcda | |||
22a0a20666 | |||
440bb1e6f4 | |||
87dffa36b8 | |||
bd052d0ce0 | |||
73dd29c59c | |||
460a983ab2 | |||
ea94f65159 | |||
6f6460fd4e | |||
5f812409d4 | |||
87f2e86928 | |||
626640cc05 | |||
f5e0ccecc3 | |||
598769fe1b | |||
f7e24f2093 | |||
9b141bf5a8 | |||
9d2adb37f3 | |||
97e2d24cd1 | |||
720732eba6 | |||
37c6f20747 | |||
e27c1c39e4 | |||
1e000027c7 | |||
e3fea07596 | |||
06ed9ba77d | |||
f4d9769450 | |||
9da7c42be4 | |||
3419558265 | |||
61383f9b08 | |||
be0acaed40 | |||
0c4c4ac657 | |||
bb8b115a0b | |||
2e2735a49c | |||
7badd54c89 | |||
4dfc561551 | |||
3c2afeba89 | |||
d2d46f4aea | |||
ccc7d9c9a4 | |||
45fcb40a5c | |||
1585b829be | |||
0ceae62701 | |||
f2bdb17c55 | |||
83b00ac80c | |||
a76e445ed9 | |||
edff591c90 | |||
cb7148d018 | |||
38afa07dbe | |||
92073a4ccd | |||
abf6304818 | |||
9a86175701 | |||
b764a2220f | |||
3776233233 | |||
0d92e8fc17 | |||
40422eac2e | |||
0fb4b1e109 | |||
44ecc7c666 | |||
26d659c433 | |||
58c4145809 | |||
fe08ce7795 | |||
9163e9e67d | |||
7acae6461e | |||
e7a172b8dd | |||
b90e89a791 | |||
db11244f49 | |||
54f0748201 | |||
575f0ae300 | |||
7a100fb0b0 | |||
d01d314f47 | |||
e5e2210768 | |||
d22b479bd3 | |||
19853b0b66 | |||
ce62514a17 | |||
4a4ba5594b | |||
af5a820862 | |||
40e4e38e0c | |||
cb1c825747 | |||
8c27754045 | |||
3247e661e9 | |||
7c93167d52 | |||
79b2bfdefe | |||
de7d710943 | |||
b88a29e652 | |||
ed0b12dcf9 | |||
280bede0e9 | |||
e6938f8f39 | |||
6d705172f5 | |||
8fff7084db | |||
e75bf27b5f | |||
2c99607d3d | |||
c09af13b7f | |||
ee6b498ca9 | |||
65f2f02d93 | |||
960edb5611 | |||
cda7a97e67 | |||
c520700276 | |||
a7654a6098 | |||
814679522a | |||
4249349c2b | |||
d065646d3e | |||
0cf7aeeec9 | |||
277ccdafb6 | |||
5b00f6fb95 | |||
3f46a97b6b | |||
3989979ca3 | |||
50eda82e27 | |||
daa3481862 | |||
2d94000dd5 | |||
e3eb155d76 | |||
f95e23118c | |||
82778473fe | |||
dae3b9665b | |||
f19058df65 | |||
3c7bec7c61 | |||
c4388a6904 | |||
6d1e637518 | |||
bbd56d6eb0 | |||
0b317ef04b | |||
c0cfbe2ce0 | |||
3ed5d41df5 | |||
517ee6532f | |||
653f22f85b | |||
38906478d3 | |||
e81d83b8c2 | |||
b12d9f62b9 | |||
0849e5faad | |||
1a4419059d | |||
4ef1333abc | |||
2b11f64ef1 | |||
5b98f432f2 | |||
76779e6af4 | |||
738d537266 | |||
327d5990d6 | |||
16021d0df7 | |||
b37a74dfd9 | |||
d02f02731f | |||
4295c54c67 | |||
fb1e4fa02b | |||
62b8a63b80 | |||
ab3c832f5e | |||
d380e7ea01 | |||
e69fd5e682 | |||
285f19e132 | |||
4151f53e14 | |||
5895fb9faf | |||
19e22d74f3 | |||
26ed6a5548 | |||
05dd7dd811 | |||
8b694f3734 | |||
c9026a1b9c | |||
9b221a06c8 | |||
f88904e246 | |||
e3533dcb01 | |||
8edb5ffe20 | |||
90e6cd7b46 | |||
1fa73fb632 | |||
a615af233a | |||
4817e13823 | |||
ee43b86742 | |||
93bfc57dea | |||
a568a5c356 | |||
0b21977e48 | |||
2f7668aef5 | |||
72fa6eebba | |||
2f6a417168 | |||
faa63727ab | |||
a2b1a0a0a7 | |||
0d7bc09c49 | |||
f57ca747a9 | |||
624421e4b0 | |||
943c6bc59c | |||
9ce0b0e25b | |||
df3a13fc61 | |||
5a0a215bfc | |||
eaff7f307c | |||
8d63591ce8 | |||
0e3aa29689 | |||
a56b19ff65 | |||
62b7ec271f | |||
5515bef3d7 | |||
092f5de231 | |||
81fdd75aac | |||
f63b2f79e0 | |||
0501d55c8f | |||
fe6f51369e | |||
8f454c7e9c | |||
965feccfdc | |||
5e18f9bbda | |||
541fcbf015 | |||
16f5374474 | |||
b414745aa1 | |||
696d95bf1b | |||
1269ce064a | |||
9097d455db | |||
1615159014 | |||
e4e1e72c30 | |||
43329b7748 | |||
2280865936 | |||
fb2f3c8836 | |||
e2f21212b7 | |||
d7597d0992 | |||
c21876ebe3 | |||
76bea5b7a7 | |||
a03d82ff1a | |||
f555656c92 | |||
f289ebd1f3 | |||
41b3646012 | |||
8de5db4b48 | |||
d8be12dcdd | |||
71f9401e23 | |||
cdd63dec65 | |||
279fdfc47a | |||
feda42f18f | |||
d86f7fc25e | |||
e4fb675d5f | |||
25e786e6a5 | |||
fd01e98cb1 | |||
2a88cdb8df | |||
be8f842061 | |||
fcb81ae074 | |||
7d9c018b44 | |||
a6e12532f8 | |||
bd202f55ce | |||
f7b5a7bed8 | |||
6123d6f9bf | |||
6c8173d1aa | |||
d2f857d176 | |||
1e2afafbc4 | |||
927c5c5e36 | |||
b2ea96b4a7 | |||
6afb53dd7d | |||
d7477833d6 | |||
7624645626 | |||
53753c0127 | |||
95604ff66a | |||
99e0d1071a | |||
13aacbcc05 | |||
b137b25169 | |||
b44fb101c4 | |||
accc8ac254 | |||
435a526140 | |||
346580d955 | |||
81f343dbe8 | |||
fa443f2e5f | |||
a25a86e42f | |||
1ffa924483 | |||
463a16a68f | |||
d2adca8d68 | |||
057bf89894 | |||
c9037982d7 | |||
ce1264564f | |||
61ffab376d | |||
f3bcaae4e4 | |||
2201214717 | |||
eba4231cdc | |||
de0a810fcf | |||
644fc36c32 | |||
41144bffeb | |||
c84709dd9d | |||
f28651eaf7 | |||
9e40eb992e | |||
f445cb7895 | |||
dfc0ecdf69 | |||
6f11f812f8 | |||
4191a9bc3c | |||
f2fbdafe64 | |||
22a037b213 | |||
dbe3ab6c97 | |||
6bed610af3 | |||
4d9547066b | |||
54c18ae0c6 | |||
e49fb9f0d0 | |||
33da2af31e | |||
bcb3678055 | |||
28da2d245b | |||
e6864adfb6 | |||
8562319638 | |||
6be17cec37 | |||
f34e6e9c4c | |||
e8051838a3 | |||
f1f5497d8d | |||
1b44ba4ce0 | |||
a4d2dc856c | |||
7964d1c2bf | |||
5df5dd155f | |||
89cce21161 | |||
0bdef36e2a | |||
e549a07901 | |||
98603dad66 | |||
c37fbda7a8 | |||
34ea4d8f41 | |||
452a76105f | |||
4982676ca8 | |||
83d115acca | |||
86bd0f7c37 | |||
83fe00a0cf | |||
526abc6a9f | |||
63feda6efc | |||
c9b3dedbb0 | |||
dae8dfe1fc | |||
100def7609 | |||
8594012fa1 | |||
27400f66a9 | |||
bb1e590222 | |||
a4b414010d | |||
1d72a96654 | |||
9b85ed86a9 | |||
e36066a9a2 | |||
8082158a16 | |||
1a8567a6da | |||
b17cbe30e2 | |||
8aadc88dd5 | |||
f3d26fae64 | |||
828d6f5baf | |||
2003806481 | |||
362823c1e1 | |||
9c10e00234 | |||
a4cef2fbd8 | |||
e5fca99b52 | |||
f4c692eed2 | |||
2e0688db5f | |||
ac2b358f87 | |||
251a2c9fa4 | |||
0064d4b2c5 | |||
ebbac6b483 | |||
d5373a62f4 | |||
681b91a6a4 | |||
8c66352994 | |||
4e1ec1215a | |||
6981894060 | |||
57c92e877c | |||
e8c0b8504a | |||
93bbe8f2a3 | |||
c78bb1f572 | |||
7256102785 | |||
fc907c568d | |||
9e078ff4d7 | |||
5658e7f718 | |||
111eb55a9f | |||
0630ec5503 | |||
38cc493eb7 | |||
254507c3a3 | |||
7cdcc9099b | |||
fb046c43ea | |||
73ddf80fc1 | |||
a5a224ac6f | |||
c56dfda833 | |||
6081f9ff1b | |||
f3c7d71b3b | |||
5748bf9549 | |||
84a0a6a418 | |||
1ee9f183cc | |||
55e8523925 | |||
c9efe24959 | |||
69aff39205 | |||
f6e9e15253 | |||
b7f685ed62 | |||
6e03eff303 | |||
3e0b95e1e1 | |||
a32997ceba | |||
63674d85e8 | |||
56848ece7a | |||
449722f08c | |||
949b4562c7 | |||
75f68c8be1 | |||
1b117712cf | |||
11356af502 | |||
9dbdf7fc8d | |||
dec38273b6 | |||
5098f5f420 | |||
d32fd72d13 | |||
a4692d5457 | |||
24ea7aaede | |||
b7f10982c3 | |||
8f28d1ad43 | |||
d5db2f0eb7 | |||
fe69f3de04 | |||
5534394b06 | |||
24fc2f7e14 | |||
5b23c9a294 | |||
7338a02b48 | |||
8555e83cb1 | |||
39494d18bf | |||
aab42c3cff | |||
f5a23c3817 | |||
b3eb81c3b4 | |||
4f4c88aca9 | |||
c3ad3ebb57 | |||
f13734dda4 | |||
24e63e7a02 | |||
a163048396 | |||
55f8471aff | |||
04e9f38e0e | |||
90972e9ce0 | |||
6e8f60a27a | |||
014090c407 | |||
e40b251c06 | |||
414a194c9d | |||
7bffe3993d | |||
3828e6d15e | |||
85c582bc93 | |||
ea1c9dec12 | |||
6753121a6a | |||
f63d899c42 | |||
7219c9f806 | |||
df2f1d10fd | |||
3f71067b67 | |||
8dc68a01fd | |||
9e0ded958c | |||
68243aa95b | |||
507df1f507 | |||
1800c29b44 | |||
0343548f6e | |||
5cb5c9713e | |||
5e2c5c793f | |||
3022cb6955 | |||
4687665ff3 | |||
001f075a49 | |||
7d78e4a60a | |||
2786bfbeb8 | |||
d3049a8d62 | |||
831a2582ed | |||
0919019123 | |||
7dd9c99c91 | |||
326c24911a | |||
133fa1495b | |||
7c040ed99f | |||
f88a2de8a9 | |||
a24ec8b07a | |||
d6ad6f96e8 | |||
411764854b | |||
973f77012f | |||
1d80da2559 | |||
00d298935a | |||
4a9a478243 | |||
9040bbb75a | |||
abcc4c0a12 | |||
ceb3a19b81 | |||
2a2f7825cc | |||
a0e9f6a92d | |||
dbc73f5086 | |||
78a67b754e | |||
de4b02eaf1 | |||
751c82fd29 | |||
8c8a94fc71 | |||
1174958e8b | |||
6399de7a66 | |||
c0f2275b61 | |||
256a9ee45d | |||
c835ce780a | |||
d7b7d2de6e | |||
1dd0635e5e | |||
67506511c3 | |||
3fbb4cde36 | |||
9aaa80a213 | |||
acb6cbffa0 | |||
6a70ab74bc | |||
852c252302 | |||
3a670b55b6 | |||
d01435f4f2 | |||
f1638c9cd7 | |||
4943504898 | |||
7d7480c120 | |||
78182fea0a | |||
947e57b5b4 | |||
e0e4a594e9 | |||
4839fe37a3 | |||
9914b7ea38 | |||
f86ed97820 | |||
8d8b0807e2 | |||
e3c6237430 | |||
e964c7fa5c | |||
f1e84e145c | |||
2e2773fa6b | |||
a9c7a27d47 | |||
e41ecb19cf | |||
5b091b602f | |||
768681c4f2 | |||
2e4e5c1873 | |||
4a61806e60 | |||
883187f9ac | |||
2d9a5031e9 | |||
39c93f6512 | |||
a7905b373e | |||
a7c82ff9b9 | |||
5b4b4a4051 | |||
c348fec609 | |||
4af17356f3 | |||
384e5052bc | |||
a5adae1491 | |||
fe62b810db | |||
ee78496058 | |||
8afe4bae87 | |||
b04bb2b740 | |||
3d46f0d72f | |||
a65d973660 | |||
df83d90c06 | |||
a1d55f2529 | |||
aa097f3fd6 | |||
e0b72202fd | |||
e8769fff7d | |||
ed33652534 | |||
d1c1f972a6 | |||
6008275aae | |||
edf8bb3945 | |||
dd7d133263 | |||
b6f783674b | |||
eab70fae3b | |||
fed40ef104 | |||
6d087f5a38 | |||
0edcf33547 | |||
443417b0f4 | |||
369e994b0d | |||
55469327c6 | |||
27f326673c | |||
e6fd766fff | |||
7da3ccfacb | |||
624d6b3b0b | |||
9528083a66 | |||
55408f6ccb | |||
dce5a39b10 | |||
03a23876a7 | |||
775357dd94 | |||
d10cbb2823 | |||
63a2465bac | |||
d97ed52e91 | |||
e1dc12c14d | |||
7c755d4e2d | |||
55b786d9f0 | |||
131f4be4ea | |||
d819617d2b | |||
b9219a2b62 | |||
554e378dd6 | |||
cc11402bc9 | |||
40220f92c1 | |||
8c4d9021c2 | |||
efefb02d86 | |||
3ee281aaf9 | |||
097b6d5097 | |||
6cdaf05f98 | |||
3be0f58c30 | |||
f3489a3b01 | |||
173dbeb24a | |||
0607b86818 | |||
1282a65bcb | |||
45d3207dfe | |||
76b46f59e9 | |||
19fa108f61 | |||
2372d06591 | |||
7015375892 | |||
e9bf2b361f | |||
51b790b767 | |||
ac84431361 | |||
7dc8463da9 | |||
71ae579bc0 | |||
5036d25b60 | |||
ff6d169862 | |||
dde8898aae | |||
72cc23ef46 | |||
5390b4ed42 | |||
17c7d90d52 | |||
5c3b5be613 | |||
5ab7769745 | |||
05374d1145 | |||
311e10f91e | |||
2b94791387 | |||
fbcae11cd0 | |||
0d6eff2a9a | |||
6a9b7fdb6d | |||
e8f703648a | |||
710f82de0f | |||
bee35acfa6 | |||
90fdaf80e4 | |||
27feb1ddd7 | |||
2be7e0f7e6 | |||
186ab70bf9 | |||
0fa9006e45 | |||
60c83bae93 | |||
553c398c8e | |||
1c90bb383f | |||
4281225b02 | |||
14dc9c6c43 | |||
c9e10b1a3e | |||
915c850760 | |||
2c3f430203 | |||
1a152ed7fa | |||
5953480807 | |||
b5c1a195be | |||
310cc086c6 | |||
61d6cd3c18 | |||
cccabf5330 | |||
6f33460afd | |||
603d524aaf | |||
eb2cd001b6 | |||
b5b57790be | |||
286f4ef961 | |||
ad28d1906c | |||
dfdccac67d | |||
b8c2752237 | |||
834c7b0def | |||
5bfe70142e | |||
b35c5a22bb | |||
eecc825c90 | |||
3823c22dad | |||
551bd3dbfe | |||
7e9956963a | |||
80c24a1993 | |||
66091bae24 | |||
73d4f10f4b | |||
ee7ea77fc3 | |||
32e1cbe2a3 | |||
3539724843 | |||
940b25f158 | |||
37e601e5b5 | |||
0230374709 | |||
86db237e5d | |||
1542b1cebb | |||
990071af5c | |||
f543e00307 | |||
34b4f8265a | |||
a366d1af2a | |||
ebe5785a91 | |||
887d46725b | |||
a326b718f2 | |||
c14b298cb9 | |||
9cca8f3f55 | |||
f5cee56740 | |||
972d183d85 | |||
eebdf04357 | |||
9ede20a367 | |||
b0c3e22a52 | |||
a78db17784 | |||
dbb9998f69 | |||
2745dbd124 | |||
c0357daf01 | |||
064fa6027d | |||
779e02a05e | |||
e222d0356a | |||
d2ae333bb8 | |||
764c42a810 | |||
18b18f1c3d | |||
b54a8b40a4 | |||
edf724d20d | |||
622a190a61 | |||
5b4a78ba20 | |||
44b85f6e4b | |||
7f1f760645 | |||
54d8c96c30 | |||
c6ab7827e7 | |||
ae26079e2e | |||
3e993156f2 | |||
3b2fafd789 | |||
72ebaddcb8 | |||
5a9950cc19 | |||
cf29d7e400 | |||
244dba3614 | |||
21886517e1 | |||
3996236729 | |||
560cb3ac82 | |||
81c7e23ae9 | |||
0b8bd6d4fc | |||
7c271c8207 | |||
58947d91a6 | |||
20096be990 | |||
7c8508b651 | |||
b56d0fdd9b | |||
d0cc06f766 | |||
d8d2b215d1 | |||
c478d383b4 | |||
e01cd1c037 | |||
e63019c469 | |||
90a325a1b2 | |||
698594525f | |||
fd540148f3 | |||
078a024931 | |||
f8193b2419 | |||
808ba603c5 | |||
61d70fa688 | |||
493a33e754 | |||
bd75c3e559 | |||
cb9e72a879 | |||
9d2fd4982f | |||
eed9a0e376 | |||
d77dbb2cca | |||
7810946484 | |||
e2906e3be5 | |||
0a8b66ee95 | |||
8ff270c5f7 | |||
4012a86cac | |||
dd4fff3a79 | |||
0ed99b7687 | |||
2c389ae11e | |||
15ff8f9d2a | |||
bd4d3b914b | |||
59b02120b6 | |||
92655c30c1 | |||
484567f242 | |||
ef6e70a38b | |||
e695e30a9b | |||
65e67b64bd | |||
ddbe339541 | |||
b2c0e6a8c2 | |||
f9384ded27 | |||
4488f25ce0 | |||
52b22b5784 | |||
5a356140d6 | |||
e79de0108c | |||
985f31877c | |||
11a71b7fbb | |||
7f26c11c9d | |||
9b93fcd947 | |||
733ca5174b | |||
bd897d780b | |||
429065d2b9 | |||
b90734f1e2 | |||
db97a7d9d3 | |||
6ff67aeadf | |||
dd7d282d17 | |||
4e637ae58a | |||
1ec2e55322 | |||
556eb3f8c1 | |||
76b49ebc95 | |||
e82443a302 | |||
5de86a6416 | |||
3f3c8cabb8 | |||
cd59aa9afb | |||
34e12e575b | |||
4c8c261ab4 | |||
099bb3b9ff | |||
c623a6aacc | |||
ce7356794d | |||
523494f9cf | |||
0edc867d45 | |||
ce4c45a075 | |||
e29941e3eb | |||
86ce3595f6 | |||
6e958b8415 | |||
d485703768 | |||
109e2107d1 | |||
3469905365 | |||
75b3846f8f | |||
a9ec38208c | |||
c38b9a4144 | |||
b6128aeaa1 | |||
881782be05 | |||
0c05930501 | |||
b96f2a19b5 | |||
c1906714ee | |||
32181d1bd2 | |||
7dfb413d87 | |||
7934a96ad1 | |||
abddbf9c7d | |||
77e66241f7 | |||
4b3f2e19a4 | |||
b29c6485a8 | |||
f4dba7a68c | |||
2817408db3 | |||
9ff3c758eb | |||
3dcc189740 | |||
4a12d1954e | |||
e4d645110a | |||
01a32067d5 | |||
fc5ce4739c | |||
ae7b8f9ecf | |||
0f5d2d6821 | |||
48eca22a00 | |||
5e164493a8 | |||
ead99208f2 | |||
4f5ad05792 | |||
bc52e72605 | |||
038674835a | |||
00f21c17ca | |||
818a1508a0 | |||
2d9480a6a7 | |||
0bec4e25c8 | |||
950a540df4 | |||
2e66c5f807 | |||
7033c2616b | |||
7292024ee6 | |||
8d4cde4534 | |||
d6df6cbb5d | |||
344e94d8a1 | |||
5cf73a9165 | |||
96b1a1c79c | |||
0bbe7f8c73 | |||
e333bb1cca | |||
454cd8d784 | |||
743a43ae17 | |||
5a1b0e19b2 | |||
da6cde5cbd | |||
5ea864da39 | |||
175448deda | |||
16f90dd821 | |||
9efac669e6 | |||
adf9ba29df | |||
cacddb9abb | |||
edbe213410 | |||
891f78be37 | |||
175be346a8 | |||
9ae981614f | |||
16f5f3ef46 | |||
2cd19b0273 | |||
e158e54a26 | |||
63a6fe9133 | |||
779eaee310 | |||
0ecfe8105f | |||
b8cc867ba4 | |||
7230556d1b | |||
afd79f4655 | |||
5d87fb8757 | |||
23e9596506 | |||
428f46fafe | |||
ee847e03a6 | |||
a870981266 | |||
411bf1107d | |||
5b74c8a942 | |||
a24bab0a27 | |||
1cb7764b0e | |||
d835f52a18 | |||
9c20ab81cb | |||
14de3e79c5 | |||
21c12030d5 | |||
2370e16f1b | |||
a384411a28 | |||
1e0ca8f79c | |||
2b5e590819 | |||
bf57b777bf | |||
f656d19ed5 | |||
eb09af06ed | |||
687c339f20 | |||
7bc170a53e | |||
65297c24d4 | |||
ea21f3fba0 | |||
b515a004d3 | |||
7d3fc1ec1a | |||
6987d6c1c6 | |||
de2b5ea905 | |||
f946a0bc08 | |||
4f47ca5742 | |||
54b51fc2fd | |||
1f284e853d | |||
2a30ad0fdf | |||
c454ae336d | |||
cd59c829e0 | |||
429f416b38 | |||
0a881d582d | |||
65b1029216 | |||
c7758fd30e | |||
46f300d62f | |||
4234dfb6f9 | |||
9695720343 | |||
1f28056459 | |||
7dacceef04 | |||
39883e8d68 | |||
949ef2c48a | |||
ada3f8b270 | |||
cf4b835b0c | |||
fec4569ada | |||
083edc4c76 | |||
fe4112a2a3 | |||
c8ea64edab | |||
6e5198f373 | |||
44c2b65372 | |||
5cc21511ad | |||
2edd7ae649 | |||
7a49a74135 | |||
be487019f5 | |||
5dee0a31e6 | |||
9f2c134e44 | |||
cdbb837948 | |||
80c1774a19 | |||
1aaa9b6707 | |||
4a7f578649 | |||
d59416431d | |||
8625fdc571 | |||
3c91e3c1e1 | |||
1560fb724c | |||
0db39ccfbd | |||
5086b96ede | |||
210cd76042 | |||
f77978a295 | |||
3e72f098fe | |||
b9fe493336 | |||
79bfac29ba | |||
2ea80d91f8 | |||
fa90251714 | |||
ff19109787 | |||
091ba4346d | |||
e43176e33a | |||
655e2856d1 | |||
c6a37ef880 | |||
6af2bad123 | |||
3e7e9e2b3d | |||
13db51a556 | |||
d6d95e05e8 | |||
b44bc80bd1 | |||
f39c9fb597 | |||
24f85a337f | |||
a069bae1fb | |||
1c8aca2f6a | |||
620241e067 | |||
da179cb33f | |||
8ea10a18d3 | |||
e2bb81bae4 | |||
dcf91c49ac | |||
c2caf8b839 | |||
95151062f5 | |||
7e4bfb1959 | |||
abae9c7e77 | |||
102aa5f22b | |||
d92c1d3442 | |||
aa186382a8 | |||
70366d34b9 | |||
49b70f44ca | |||
f79fb4190e | |||
d980194600 | |||
fb6e1fd33c | |||
6f7fc638c7 | |||
2459e85c1d | |||
74e27a2edc | |||
808852f4cc | |||
67e6d177b4 | |||
04694b4126 | |||
bb6fb3fdf8 | |||
4ec64f8980 | |||
332874cd4b | |||
276ca61cde | |||
5f3ad70190 | |||
ff8ec43a28 | |||
ecc369c2f8 | |||
26fdd0a62c | |||
64ff6a0ff5 | |||
fd7dba1d74 | |||
38ec517200 | |||
20a74d1654 | |||
d5451756fd | |||
893ca1b328 | |||
2dd69e21c0 | |||
a01b3a2473 | |||
6ac538fba4 | |||
41c2d64ef0 | |||
a1970bbfe3 | |||
d329aef876 | |||
abc0489ac6 | |||
2bc47f4e97 | |||
933a09f981 | |||
adc2d5fe7c | |||
def149a29e | |||
39cb23813f | |||
85f5a6a84a | |||
6ace5668b8 | |||
c193a4ceb7 | |||
1abc110f8a | |||
73740aea89 | |||
83110975fa | |||
881c5ea308 | |||
22f1b94062 | |||
ea30d94324 | |||
1ed462a29a | |||
a2efd7f7c5 | |||
ca33058637 | |||
a6f143d1ca | |||
1368fa4c3b | |||
cca3099d13 | |||
368c66727b | |||
a688edbdf1 | |||
e570aadd72 | |||
f85cf61d68 | |||
f27c71a0d4 | |||
940682255d | |||
a00722bef4 | |||
84132d9459 | |||
42fd284560 | |||
40d6847c96 | |||
abd3f8b3b5 | |||
3c986ed681 | |||
8b24b0f657 | |||
0f8042eeb4 | |||
f97632202b | |||
a79e933c37 | |||
ef53bcf601 | |||
08a87f3a21 | |||
b3dabb221d | |||
899a6734ee | |||
7f48c8c14e | |||
2c24e9eff6 | |||
5cdca9d490 | |||
1ac6c50334 | |||
4cbad399f7 | |||
2b8aebbdf9 | |||
e9a15b4e9b | |||
dd18abcac3 | |||
b046f21e0d | |||
29fb803be1 | |||
bc2eb0d79f | |||
0bec1f1585 | |||
a1ec3e0a22 | |||
7bc22353cc | |||
efc2242046 | |||
5dadb8749e | |||
82735dd571 | |||
9fb4cde2a5 | |||
164ae2bcbc | |||
a172a17c81 | |||
5ee90f4e61 | |||
3aae791bee | |||
9f05519ccd | |||
f19491f909 | |||
967a856061 | |||
87d5997b46 | |||
c20a1f24cd | |||
260c36727c | |||
03d9965758 | |||
e853d61e16 | |||
42e8ea29ff | |||
1e90c821dc | |||
cad0ae0113 | |||
21f3dd11f4 | |||
a9c13aa20e | |||
d3fd115743 | |||
df0e6016bb | |||
cb70e51016 | |||
cf2fa37e56 | |||
28d9f25f9a | |||
fdb83c24be | |||
660b57cdd3 | |||
405dcda824 | |||
266612bbdf | |||
2722e27415 | |||
f571ad9d47 | |||
ef8a9835b0 | |||
b71b939307 | |||
9e3ba11e8a | |||
91e9406304 | |||
0d8272890c | |||
a182cca5e9 | |||
e6fbd6acf1 | |||
062a82c89e | |||
89cc6f9bf3 | |||
3c8e0b17a7 | |||
e0023bb908 | |||
a5547491ed | |||
78546e9246 | |||
7457ef043b | |||
e0e97a3629 | |||
f5e6820903 | |||
27fd3b0b14 | |||
ced244d30a | |||
6fa7cb4af5 | |||
94cb808285 | |||
42df3c9c3f | |||
0c4c113b0a | |||
3c1b08daab | |||
d7f4eb5955 | |||
87b5fbd237 | |||
6c97cc6192 | |||
cbcd3f7c4d | |||
f7dceb782c | |||
140fff9c23 | |||
8c586dc360 | |||
fe52f60389 | |||
9064fcbc77 | |||
9a1d2970cc | |||
26ba6e4756 | |||
ae58cc74bd | |||
37e795d539 | |||
49960b257d | |||
25a421402b | |||
8e72c48319 | |||
c1965b607b | |||
d38f5aca5c | |||
c06859aa9f | |||
e706a2cfe2 | |||
0c301b2f5d | |||
deb12972fb | |||
8346b9822d | |||
19cdc7d34a | |||
49cc5d606b | |||
58470e8911 | |||
38699cca8f | |||
0eb8895959 | |||
99d4e0c390 | |||
6d32aeb310 | |||
8184308755 | |||
b68953b733 | |||
7dce494ad6 | |||
4921bf8b6a | |||
32cb0d6e4d | |||
e2c5a3c498 | |||
ec34a65cff | |||
9296615dbf | |||
56795940b9 | |||
09a5952248 | |||
735435306d | |||
bdd57bf356 | |||
8840b28968 | |||
e31591a35e | |||
457a218723 | |||
9724660dda | |||
eac6c1c552 | |||
54d73e834b | |||
099e3c7198 | |||
96a68c6b14 | |||
2a0a1247e3 | |||
7555d66748 | |||
c76940f7ce | |||
b2ed027bc3 | |||
01a1981ca1 | |||
03228c528e | |||
ac510c1553 | |||
a1ff5e1a4f | |||
7b43444d81 | |||
16966a4957 | |||
7e7a6d7807 | |||
f32dbeeb6d | |||
f78ffaded0 | |||
8480ebde89 | |||
44ee7d6a6b | |||
2ab47b7968 | |||
7181b7632b | |||
b82eaca45e | |||
fd04f31c5f | |||
75abe8a0af | |||
f7995ce49a | |||
2c2309acac | |||
5f79ab34f5 | |||
5bcbee7423 | |||
961049cf9b | |||
72e7492a78 | |||
5e4b4bbacd | |||
a64d368de2 | |||
6146b58520 | |||
f35e6e99af | |||
5d8440fdd1 | |||
d1b394b20a | |||
520d6decac | |||
4a251f0ab0 | |||
c215abb50c | |||
31ca47837d | |||
560699fc6b | |||
51ec94f78c | |||
ac1210fbea | |||
c03f93521b | |||
62ede23b0e | |||
60f28599d9 | |||
629f23c4f3 | |||
27db344739 | |||
777a47fd99 | |||
e913159cb4 | |||
b285c3137a | |||
f0576ddcd9 | |||
6e2cc333f2 | |||
18c7c5a9be | |||
01945716d3 | |||
2f5b231dc3 | |||
75c1aafaef | |||
b9714d0ac1 | |||
5774654582 | |||
0a46b8ab6a | |||
a556573a4f | |||
fd91819b2c | |||
24c04057e9 | |||
2960b73da5 | |||
c4645a9a96 | |||
877e84ea1d | |||
cb1058c693 | |||
be6bf5052e | |||
7780003d01 | |||
445ca937fd | |||
1f4aff7f27 | |||
788351a0cd | |||
5ba7753bfa | |||
ae57829190 | |||
030ddf4ea1 | |||
ccc43633b7 | |||
aba2ea9746 | |||
d5ebbb99a7 | |||
a636a60e00 | |||
ad7e3f83aa | |||
baa7aab1d7 | |||
2e320c01b3 | |||
3cd6c618a4 | |||
5ea759bc3e | |||
11cb3a1bf7 | |||
7412cf586b | |||
f976cf7ae5 | |||
e92b498b68 | |||
1b0810ec87 | |||
45e523a468 | |||
d42481d196 | |||
11c946bfe4 | |||
589d6f9e12 | |||
79b3a232fc | |||
f95230b86e | |||
fc9a16aa81 | |||
81a4d28918 | |||
fd137fe054 | |||
efd3b244e1 | |||
dbeaaf270c | |||
32642b7ec8 | |||
096c2f6165 | |||
91ae8b4cc7 | |||
cc52890d45 | |||
5a12f1f56e | |||
b7b9ee5a80 | |||
97a0cbdd18 | |||
b8f43732fe | |||
658b3df123 | |||
d32312e738 | |||
20023f8d8a | |||
6b2ff04bbf | |||
d80a9c820d | |||
4b62af1675 | |||
6414e1d9e3 | |||
a55f04dc28 | |||
2d68e28a70 | |||
4c2a157dce | |||
d9647dec95 | |||
15647c81f0 | |||
24a0d9123e | |||
720c29350d | |||
aa939b07b1 | |||
0e3c3abf73 | |||
a8606f4efa | |||
475f898222 | |||
69f5fdf8e1 | |||
fe1ad35cad | |||
352a0b7377 | |||
52689bc5e8 | |||
3dd3ea1c35 | |||
fff33d8c29 | |||
db9829a11e | |||
9a1b5d869d | |||
605cd36e27 | |||
24a23b67dd | |||
0df72411a0 | |||
5a72033622 | |||
4e6095ca13 | |||
f81b0b2a84 | |||
314f634e16 | |||
ba040ba2ba | |||
a22ed40256 | |||
58a4ff8246 | |||
9a5ebfb642 | |||
5d0faaa5a8 | |||
108b892e30 | |||
0ff37c9999 | |||
8c3de609ab | |||
073efef2a1 | |||
b9fd97dae4 | |||
60a7af6a8c | |||
0f02b3b653 | |||
620335631f | |||
3ef96cb215 | |||
59e1c88726 | |||
3a27fbc883 | |||
ce6b96ea84 | |||
3275bb59bf | |||
fbb62fa8a6 | |||
261c46d4ef | |||
0c0ceb2caa | |||
de60f20c21 | |||
314fe7d309 | |||
a271143c52 | |||
2d4a3db250 | |||
7fba6f78d6 | |||
eee12b9b66 | |||
d3e151feeb | |||
dd1fe74956 | |||
49aed34325 | |||
81ba2a5a74 | |||
1c87f83463 | |||
e15f227c48 | |||
ea5f2742f8 | |||
9fd0943b75 | |||
80acd52fc2 | |||
b8312d545c | |||
82f36a1ac3 | |||
9f7c827572 | |||
6328206e78 | |||
154be7fa81 | |||
b8c9eee8af | |||
43664672fc | |||
5cc9a328ab | |||
6556c37e58 | |||
292049199a | |||
ed4452b115 | |||
fbfbf340c1 | |||
a57ea79bf8 | |||
22e8e02f3d | |||
a10625a052 | |||
42020e2498 | |||
36a2228220 | |||
206054b35f | |||
b87048020d | |||
f7d4bf5fa8 | |||
f0bf531e1b | |||
176cf17f9f | |||
b41262a20e | |||
d0a6861369 | |||
0bc872eafd | |||
edc2d10574 | |||
a033e2d1fe | |||
c0aefe4c62 | |||
86069ab7c6 | |||
86f2dde6f3 | |||
96f60aab66 | |||
2763853d8d | |||
a78b461d45 | |||
2fa20b5c52 | |||
0b0b63aa7d | |||
fe174e35c8 | |||
88e40e28fc | |||
73ce3c94e9 | |||
d2033aacea | |||
dfb47a089b | |||
81960ce051 | |||
2ecceb8ed2 | |||
00a9a2c04d | |||
7716fe1d1c | |||
09b3df5520 | |||
ee2e4896d2 | |||
390f1935d6 | |||
365c3eaf4b | |||
b83121a951 | |||
efe8e599fd | |||
bc69259dd1 | |||
607e8eb477 | |||
139171a79f | |||
848e525919 | |||
b805822eea | |||
13e9306753 | |||
880d5bb8b0 | |||
f9de23b16f | |||
e5aa71e4e1 | |||
ba441ca77c | |||
decf482367 | |||
253e4b13b5 | |||
04ca6621f1 | |||
20334772b5 | |||
a62bc3846e | |||
3b092f28c3 | |||
2de46f545f | |||
8fce660afa | |||
dbeffe426f | |||
d4c42bd546 | |||
fad5d1d744 | |||
46a9c1b6b2 | |||
5ac1e9454f | |||
17f9d57207 | |||
5bdec19f31 | |||
90b80083e8 | |||
8d02e8b8f7 | |||
7e41841a74 | |||
0f296e7e37 | |||
9c32ff278c | |||
60139035d8 | |||
5ab34436ec | |||
178080fd12 | |||
915b9145f6 | |||
6020faf970 | |||
ec82fc82a2 | |||
8d7b775875 | |||
682114d6f1 | |||
0ac5cd6789 | |||
209040fdc1 | |||
643f290057 | |||
ea0b462d0b | |||
442a7e3576 | |||
f7385e8e88 | |||
20a10c7fc5 | |||
2f05040081 | |||
7e0fb43dbe | |||
26e7069099 | |||
7d90c468ae | |||
f9494c940e | |||
135ef8701c | |||
1bd6e898ad | |||
b15ddc00a5 | |||
b4088a6d00 | |||
c57f68aee3 | |||
31eed6c5e5 | |||
09852dcada | |||
5768fcd429 | |||
d901cc875a | |||
4f1ccfe58f | |||
f852f9fa89 | |||
5c388d4271 | |||
f9cae60225 | |||
49ceadc6ad | |||
47469e8759 | |||
742f60b48d | |||
0615ff6dd8 | |||
2fbe33bca0 | |||
fdd73552ea | |||
17b4f0b4dd | |||
f87322591d | |||
47099190f4 | |||
9cf777b4e5 | |||
8675d3fa46 | |||
2b9d8dba5f | |||
b37814976c | |||
895aa0cb34 | |||
18a9866a02 | |||
88ead0aed1 | |||
f19c7dc575 | |||
7eab5d567e | |||
5b1c6daa2a | |||
464e1a509f | |||
c1394b290d | |||
0029c7fe09 | |||
e9f9871c1e | |||
d1b46c838e | |||
a7b9187234 | |||
c7202154de | |||
6809da0353 | |||
fbdcbf17c7 | |||
44a9de6dcb | |||
a077081e46 | |||
728fd7f5b9 | |||
053160a6eb | |||
9bbaba3d59 | |||
b1577d101c | |||
53e6cf3e4a | |||
a9f9f4ef04 | |||
15f142fc70 | |||
6e9429dbbf | |||
be628051a7 | |||
f0eb177a8e | |||
7c481291dc | |||
f1d20f591a | |||
c6a8e03367 | |||
cbb7869da1 | |||
1f796ca0e5 | |||
703b29a05e | |||
56ceee220b | |||
0328037b49 | |||
3c796ca7c8 | |||
e6e14be528 | |||
f42d1a89f2 | |||
5a89388fb0 | |||
d043412e0f | |||
71168b1a5f | |||
95c1b0214c | |||
2408c0a4c7 | |||
4b3f593df9 | |||
67aea4db3f | |||
6b44a8ae75 | |||
70f4fa2665 | |||
17ff3250c9 | |||
2cce47a13d | |||
c1f62f8ead | |||
e0c174b9b6 | |||
e5ec4de3a4 | |||
bcf09964ab | |||
f3992f8e53 | |||
66cc557d2f | |||
9786b3e1b9 | |||
30bc691c95 | |||
83110326f4 | |||
182835fabf | |||
7345d3ea19 | |||
3f4aa320c2 | |||
dfd853fa87 | |||
3289e84b21 | |||
39639d45fe | |||
b45abf67a5 | |||
e57871cab7 | |||
484ea15959 | |||
40109263f0 | |||
b45261c3dc | |||
73bcf18498 | |||
0dcc6f350d | |||
0488cc4086 | |||
7784743cb1 | |||
0a4bc1d4e3 | |||
3630084a64 | |||
53c561e895 | |||
88db253515 | |||
da928efb43 | |||
cd3d638337 | |||
3de2d84e2b | |||
1d5d09feab | |||
2c2b419685 | |||
a7f8838d9a | |||
a18f8b2a4c | |||
9b65a091da | |||
8ccf9d2e53 | |||
cd6137bdb0 | |||
6d69c25a2f | |||
7b6bab7f4e | |||
257a8c63d2 | |||
3146ab5d12 | |||
2d4722477e | |||
d815daed29 | |||
c4e7674585 | |||
94f565db84 | |||
6e03aa3a3d | |||
737c29b510 | |||
0222c56c4d | |||
8d0584aa59 | |||
7dbeab11a5 | |||
7cad06ea18 | |||
3236fb6b3d | |||
0194e09410 | |||
187e30d055 | |||
39a7062503 | |||
4ff7c868ef | |||
5573c3871c | |||
d620579247 | |||
48651286b6 | |||
0e7a2b3141 | |||
f3b8ae4224 | |||
a2451a716d | |||
2e5dabb913 | |||
4e43e7d3c3 | |||
49acf4bdb9 | |||
f3d8c30f95 | |||
4486a85d4c | |||
8a6892bf3c | |||
087a8f6dd0 | |||
5e681ec03c | |||
784a5cd349 | |||
5345dd2674 | |||
8127fd9960 | |||
3177aaf591 | |||
70b484a2c2 | |||
ed6c9a08ce | |||
601989c5ff | |||
234585dc97 | |||
2388b2a62b | |||
69d9438c71 | |||
0b500dba54 | |||
798b12ce7b | |||
334d50c367 | |||
dd1da95a40 | |||
6684ac5a57 | |||
b533d7a1dd | |||
95d1f43799 | |||
9c7cc87c5f | |||
374379ba03 | |||
56ac577b0a | |||
941c0f4297 | |||
f34745bef9 | |||
9fef7f0ba9 | |||
971cd2ca0f | |||
6bf8bec22d | |||
d771816b02 | |||
f78a1a7b15 | |||
77f8414c63 | |||
4d84d03a63 | |||
065607b68c | |||
f5807364e8 | |||
92d86192aa | |||
d44c60614d | |||
19a8326f0f | |||
be9aaf8902 | |||
5cfa2b7fdd | |||
6218b773fd | |||
7bcfea13fb | |||
89843c0d65 | |||
31d4a5c72e | |||
83f25cbc16 | |||
27fc19d6b3 | |||
9cfccc5cd4 | |||
9da19fbf54 | |||
a481a5deda | |||
e8692334f6 | |||
239befa4ee | |||
2e9b0066de | |||
55d905a0d0 | |||
181adb277f | |||
82ec0164b0 | |||
c8354b100e | |||
4366719ed2 | |||
971eb8e35c | |||
a785c238b1 | |||
eda02750ae | |||
e5d50eb45c | |||
b66bf5f4c0 | |||
d1c8cc38f2 | |||
10bada0bcc | |||
5d7e7b1796 | |||
0f7fe260d1 | |||
46be56af43 | |||
dce2655004 | |||
36acdd7797 | |||
47e297fecb | |||
9ce19c7e83 | |||
9954900a0e | |||
a7855ae664 | |||
76865a1730 | |||
8febdc19ea | |||
85a814c21a | |||
ab5650f84b | |||
77c591ce81 | |||
dc067642b2 | |||
d0ee0c2919 | |||
6d50aa2e25 | |||
b68b9794ec | |||
e6ea4cb613 | |||
47ba127733 | |||
bbd694c5ea | |||
7ba2a7cd3d | |||
a1ed99962c | |||
c2970631a5 | |||
d38c843574 | |||
a2213a1aa4 | |||
dee2d2c538 | |||
fec19849b5 | |||
5b77adccb1 | |||
a82c4666d4 | |||
df6a8da290 | |||
39c8996093 | |||
af1a0f3587 | |||
703912fdc9 | |||
744881da59 | |||
5f55c18373 | |||
2137eb1794 | |||
3dcf1784fb | |||
9a3dcd3daa | |||
1b74f380a6 | |||
cd2a4ea535 | |||
536a9566da | |||
d2abac9b18 | |||
94f8afec38 | |||
3d5c1411c0 | |||
9a7e5d86fc | |||
ca29b4e370 | |||
392fe1cbd0 | |||
aa955819b0 | |||
b1f190a7f8 | |||
5453df94e4 | |||
7b314b58a4 | |||
7c41c7c2f3 | |||
5e144a2c98 | |||
61b6eea52c | |||
cd8dc24454 | |||
6531dcbc78 | |||
123963f760 | |||
08a94b6f7c | |||
43ae62afd8 | |||
e08dc5f0d7 | |||
1e26feb566 | |||
96567dea4d | |||
c720933d34 | |||
f61d03ec8f | |||
b7bc4401eb | |||
7a07a2e63e | |||
2c242944c7 | |||
6265233903 | |||
be3e26c202 | |||
9f76293684 | |||
1be2e510da | |||
af049eecc9 | |||
fe237f15aa | |||
bdce78ba6f | |||
f26e3c42dd | |||
92cd03cf2a | |||
e7da3d7f5f | |||
f966eeb604 | |||
34176f974b | |||
60f0f775ef | |||
5f044a7948 | |||
9f1dd3dd5d | |||
db6f983364 | |||
386883fbe5 | |||
e08527a0af | |||
4a6d5e8395 | |||
83bf34fb77 | |||
1c8666e946 | |||
0440b7a2f7 | |||
223a85baca | |||
ed39a755bc | |||
519156512c | |||
9fa424ea9b | |||
883a97a38c | |||
c671a8e235 | |||
55a44aecc3 | |||
81fc1c9010 | |||
8037bef847 | |||
98ec287797 | |||
bc2765eb1f | |||
94644c2863 | |||
fa090131ae | |||
48b46d74cf | |||
66b22a218a | |||
3d18d4f9ce | |||
cba2f4d7b6 | |||
54f14c64b5 | |||
785be3cb26 | |||
20a9522542 | |||
f8d957a705 | |||
2eecbf966c | |||
cef778731e | |||
adde5e6a7e | |||
37863df67e | |||
0e56a155f8 | |||
c6de3ebea0 | |||
a25c1155c2 | |||
42ee6fe96e | |||
a33fd6de27 | |||
a2830dd527 | |||
f46e1b7a3a | |||
bf59dda01b | |||
1dc15326c9 | |||
1a3720cfb9 | |||
5ea3f2bbc6 | |||
08103cfc59 | |||
78e88b6115 | |||
26bb74afd5 | |||
0587f5b964 | |||
e0ec4ff435 | |||
0677bcecb9 | |||
9023358d9e | |||
3edf7e6ca8 | |||
59114bbd86 | |||
fac45d37f8 | |||
d0c73af459 | |||
fcd1862f40 | |||
a6d42bedc1 | |||
04ebdf428b | |||
6a90388649 | |||
1b2e410f12 | |||
f7647438ef | |||
436a6bc521 | |||
8ea3909a64 | |||
ba0ab796c6 | |||
0e61020652 | |||
c51c35018d | |||
5505c6c0e3 | |||
d6a8360b29 | |||
0687ceb8a4 | |||
6d7854d113 | |||
aebe891895 | |||
e69d5e5873 | |||
6ea49910d5 | |||
b17e318373 | |||
b0aa4c6682 | |||
4d6c452d79 | |||
d06d5def72 | |||
aa3c943651 | |||
1369b10e76 | |||
8059e04499 | |||
71c4201487 | |||
425c1fb0a8 | |||
bbf829fe92 | |||
6f2779fe5d | |||
b51f2ae722 | |||
d5dd4d4b3a | |||
eaecad3d49 | |||
a8a0cbbbf3 | |||
94e21bf8a1 | |||
02c3d870ff | |||
91f0217d39 | |||
5e73348263 | |||
2f871db3ae | |||
28972b838b | |||
0473405b92 | |||
0d00d0534a | |||
fe210aadcf | |||
6fcc76e199 | |||
e927669632 | |||
24d2b94e79 | |||
f81b1089c1 | |||
fee748d384 | |||
d003dffc5a | |||
a300168658 | |||
0a638b7ea5 | |||
ffcf6b873a | |||
c69e80249d | |||
2a4b25705c | |||
412a1f78cd | |||
73dacd9418 | |||
caf5159002 | |||
d9b6842b6a | |||
affa54e5fc | |||
c3ac10fe29 | |||
0a907627be | |||
26dda104dd | |||
7c6fabaee2 | |||
dc07ea56d2 | |||
b68e5db46b | |||
fd9ca0cd15 | |||
6c47c7506e | |||
18d93dfcc9 | |||
900a4d463f | |||
4b24d6e3e4 | |||
2988724456 | |||
c79d5f0cf1 | |||
148b7dccfd | |||
83f64cebbd | |||
69de40c409 | |||
bf9552bd56 | |||
5afaa6d0ee | |||
d2c86ab71c | |||
6d9de80f12 | |||
bf77d70d82 | |||
8525fd6ae8 | |||
dfca7f0b68 | |||
500b3e08fe | |||
6739d5a570 | |||
f8dd90c9a9 | |||
8d459d86d3 | |||
f30bd18bdc | |||
0035f56e15 | |||
4123aa4c23 | |||
1bff3c6615 | |||
936b04e075 | |||
4d23ad03c3 | |||
a75823227d | |||
96d337388b | |||
c89793eab9 | |||
e27b40390d | |||
1d80914070 | |||
e61caa37a8 | |||
d07a1d265e | |||
54921c5dbd | |||
5cb2a970fa | |||
89b951e7d5 | |||
6daf387c90 | |||
2c3d8ce695 | |||
17fa5f77d5 | |||
c0a9ae5c70 | |||
4b7916c2af | |||
6d968a7093 | |||
634afdcf8a | |||
3ebeb1bab2 | |||
5ab564a6d0 | |||
04dc6230e7 | |||
51115c1254 | |||
40c8046546 | |||
fa761f9616 | |||
5fabadd059 | |||
38408c1e1f | |||
fbdfe31f17 | |||
bd8ba738cf | |||
8b8b6c3b9e | |||
bd3b29c304 | |||
1339924c2e | |||
46eab99888 | |||
461d71c2c7 | |||
1ccc526960 | |||
5338a9caa3 | |||
4ae302762a | |||
98513c9967 | |||
b6688db8b7 | |||
d5c98e4f27 | |||
b7e9bcb609 | |||
9de7297d38 | |||
97acf23a6d | |||
80718ebb95 | |||
643d118062 | |||
15b5a14995 | |||
68a3b3f9e7 | |||
5f1bd286c7 | |||
d18b4f12d0 | |||
d9010230a4 | |||
686d615639 | |||
d205370e9b | |||
ce66033190 | |||
bcff26d4d7 | |||
898d79956d | |||
522f814811 | |||
394d272324 | |||
84ce7db3e3 | |||
f873bd41f8 | |||
5c8c10e7d3 | |||
a504a13fa8 | |||
d09964a897 | |||
4416c12747 | |||
80a1b95887 | |||
9eb18ca72d | |||
37653d8446 | |||
e269c91d26 | |||
ac3236693f | |||
d0b3c5ee86 | |||
23a2022f04 | |||
c5177efd0b | |||
d51cd9c277 | |||
6257480d0d | |||
4af615a49c | |||
6186b425d4 | |||
c038074489 | |||
d25f1ddc21 | |||
5011e47709 | |||
33d16bfcf0 | |||
bae21f1cdd | |||
5f9fd9260e | |||
61db0f1d6a | |||
1598c8197e | |||
01db26f204 | |||
12876c5c2b | |||
5c60d922ca | |||
0570fc6c48 | |||
f3c8277572 | |||
1dbaed5beb | |||
19c1574993 | |||
346579b93c | |||
0ce2343836 | |||
d5e277b7cc | |||
3cc4af1723 | |||
8d05903bf3 | |||
7f845ab943 | |||
ddc8a353cb | |||
9ce9171a36 | |||
dec43bec9d | |||
99377130eb | |||
3df01405a1 | |||
3adcf66453 | |||
b88c262a9d | |||
5999bcee3f | |||
078b0af312 | |||
a0fb88e28a | |||
3cd4b005d8 | |||
0a523fc06c | |||
16864509af | |||
cb8af5add9 | |||
3f96dc1432 | |||
e1aa29cb36 | |||
6e87089ded | |||
a84195bb5a | |||
70a0bd72c0 | |||
fea7e97112 | |||
7beb332b31 | |||
7b2c1f27fc | |||
67f7b6cda3 | |||
4266cba53b | |||
19725fec04 | |||
a6e5a0c7f5 | |||
2baea815fe | |||
b38c57bcf9 | |||
e6d1805f0b | |||
9a95b0df56 | |||
70a5d32925 | |||
c2a549b0c2 | |||
0fda79a075 | |||
21e2ac6695 | |||
e7d75d1412 | |||
4386dc56ea | |||
a30b79c5fc | |||
f29a74042f | |||
c46643f0fd | |||
5fe4de626b | |||
774380c2ef | |||
8643f0454e | |||
f844d9cb47 | |||
f94fbb951a | |||
899de600e4 | |||
971049b41c | |||
aaf61082c1 | |||
b14abeff48 | |||
f26553b451 | |||
8b93ad00a2 | |||
5e1a931145 | |||
abfee8308e | |||
d2449762c2 | |||
59848c3115 | |||
834206ff20 | |||
ce4ed18ceb | |||
b710bbd80e | |||
446176bf5e | |||
0b2d780618 | |||
d9b0364aa8 | |||
76818448e2 | |||
131144ec34 | |||
a21d75b273 | |||
c79b3ff7f1 | |||
2b5ac57fd0 | |||
32d317d3cb | |||
71f606912a | |||
62b80c396b | |||
7687a0a929 | |||
f63fe1699b | |||
4f98a3fd42 | |||
693304590c | |||
947e31ca8d | |||
45d4b92678 | |||
07a2a49d93 | |||
7ddd22209f | |||
6cd4484be9 | |||
c235974eb6 | |||
1b4f945907 | |||
4fdd51e190 | |||
95b9ae9f24 | |||
94e580bf4e | |||
e4dca37874 | |||
7f607b742f | |||
2c0e0b2619 | |||
8e55c9e4d0 | |||
41ff1b0188 | |||
48c1aeae03 | |||
eab2fd91fd | |||
ba6d120cf4 | |||
e9df6166ee | |||
9bdc85fd52 | |||
fa48a6460f | |||
f07854879a | |||
9ca53c57f4 | |||
ee420cc35e | |||
d49d91269d | |||
a59efc2fc0 | |||
20f28383a0 | |||
d3c3491a91 | |||
e6a7cc2036 | |||
4d615416f3 | |||
137a5d1c42 | |||
65354e995d | |||
3032cade17 | |||
948a977fa5 | |||
c7d8b50474 | |||
f1da544279 | |||
40d1bb204c | |||
21a36f8ee9 | |||
be24c6ab4d | |||
3d96e584fb | |||
963eabc3f5 | |||
f63041a402 | |||
c084db1619 | |||
4edfb8f6cc | |||
6501c1f171 | |||
41ab97203a | |||
a51a03843d | |||
e9b8478942 | |||
29cf1c8596 | |||
e992e156bf | |||
cf13ec4035 | |||
c1d01dbe34 | |||
ff5da3c3cc | |||
7d9e10095e | |||
bb17630571 | |||
7746f7eeee | |||
e089c4a546 | |||
8f29d951cb | |||
f182a6dcae | |||
0c76116948 | |||
2d4421e8e5 | |||
c141189883 | |||
51cb87bc6f | |||
cae708f2d7 | |||
3a77bed0d4 | |||
10a7367b33 | |||
dea6551841 | |||
4ca71e7df1 | |||
d9e6111ac3 | |||
d4af985eb8 | |||
0d904ad1cc | |||
b3ac567b53 | |||
03aec79cc1 | |||
9a6d148bdc | |||
a9288e5a5b | |||
8280fd557b | |||
15a1cbf95a | |||
775797b4e5 | |||
4c8df44a5a | |||
3f52a6d5ba | |||
237d55208c | |||
5a72e3f2a1 | |||
1ce913ea74 | |||
9911a04232 | |||
f1549e1f0e | |||
4659a44bd6 | |||
e39d4f5c32 | |||
fb9fe8c77c | |||
ab914e1566 | |||
e0b9d080fa | |||
42c22758bb | |||
522b42c2a9 | |||
5a707a4849 | |||
1bed2650f3 | |||
65f30dd7b4 | |||
59b27d5d0a | |||
6575ddab26 | |||
dc4a5df2af | |||
2e4c9411af | |||
e477d1fbcf | |||
6059e0dce6 | |||
234bb75b9b | |||
38dfa40e32 | |||
5add5f1cfb | |||
f1b809f8dd | |||
47aa63fc04 | |||
08000a6f62 | |||
31d4706abd | |||
e23ca2c109 | |||
17efa89047 | |||
527efe3359 | |||
5aed0efd61 | |||
27ac17e8c3 | |||
d2c545eb27 | |||
035ab14202 | |||
eb57e14ac1 | |||
3cc996d89f | |||
d7d981f522 | |||
eedc1c3ccd | |||
63075c9607 | |||
b6ccda3568 | |||
35f96a534a | |||
7e8349d45e | |||
88d06b4437 | |||
3019e35724 | |||
38bb2c61c1 | |||
c7949ecd07 | |||
2b467f3fee | |||
a750b06fc8 | |||
286a53991c | |||
b92a8467c9 | |||
a092f87c50 | |||
02937216b0 | |||
e02d7528ad | |||
2ae41c8434 | |||
aa2bc4a63b | |||
74218f4f93 | |||
a60c57ac58 |

@@ -1,27 +0,0 @@
-build: off
-
-platform:
-  - x86
-  - x64
-
-environment:
-  matrix:
-    - TOXENV: "py27"
-
-install:
-  - cmd: git submodule update --init --recursive
-  - cmd: SET PATH=C:\MinGW\bin;%PATH%
-  - if %PLATFORM% == x64 SET PATH=C:\Python27-x64;C:\Python27-x64\Scripts;%PATH%
-  - if %PLATFORM% == x86 SET PATH=C:\Python27;C:\Python27\Scripts;%PATH%
-  - cmd: pip install tox
-
-test_script:
-  - cmd: tox
-
-notifications:
-  - provider: Slack
-    incoming_webhook:
-      secure: E9H0SU0Ju7WLDvgxsV8cs3J62T3nTTX7QkEjsczN0Sto/c9hWkVfhc5gGWUkxhlD975cokHByKGJIdwYwCewqOI+7BrcT8U+nlga4Uau7J8=
-    on_build_success: false
-    on_build_failure: true
-    on_build_status_changed: true

1  .github/FUNDING.yml (vendored, new file)

@@ -0,0 +1 @@
custom: https://platformio.org/donate

5  .github/ISSUE_TEMPLATE.md (vendored)

@@ -6,9 +6,8 @@ What kind of issue is this?
 use [Community Forums](https://community.platformio.org) or [Premium Support](https://platformio.org/support)
 
 - [ ] **PlatformIO IDE**.
-All issues related to PlatformIO IDE should be reported to appropriate repository:
-[PlatformIO IDE for Atom](https://github.com/platformio/platformio-atom-ide/issues) or
-[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues)
+All issues related to PlatformIO IDE should be reported to the
+[PlatformIO IDE for VSCode](https://github.com/platformio/platformio-vscode-ide/issues) repository
 
 - [ ] **Development Platform or Board**.
 All issues (building, uploading, adding new boards, etc.) related to PlatformIO development platforms

53  .github/workflows/core.yml (vendored, new file)

@@ -0,0 +1,53 @@
name: Core

on: [push, pull_request]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        python-version: ["3.11", "3.12", "3.13"]

    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: Run "codespell" on Linux
        if: startsWith(matrix.os, 'ubuntu')
        run: |
          python -m pip install codespell
          make codespell

      - name: Core System Info
        run: |
          tox -e py

      - name: Integration Tests
        if: ${{ matrix.python-version == '3.11' }}
        run: |
          tox -e testcore

      - name: Slack Notification
        uses: homoluctus/slatify@master
        if: failure()
        with:
          type: ${{ job.status }}
          job_name: '*Core*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          token: ${{ secrets.SLACK_GITHUB_TOKEN }}
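
The Core workflow above drives everything through tox, so the same checks can be reproduced on a local checkout. A minimal sketch, assuming a recursive clone and one of the Python versions from the matrix; the tox environment names are taken directly from the workflow steps:

```sh
# Reproduce the "Core" workflow steps locally
python -m pip install --upgrade pip
python -m pip install tox

tox -e py          # "Core System Info" step
tox -e testcore    # "Integration Tests" step
```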

45  .github/workflows/deployment.yml (vendored, new file)

@@ -0,0 +1,45 @@
name: Deployment

on:
  push:
    branches:
      - "master"
      - "release/**"

jobs:
  deployment:
    runs-on: ubuntu-latest
    environment: production

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox build

      - name: Deployment Tests
        env:
          TEST_EMAIL_LOGIN: ${{ secrets.TEST_EMAIL_LOGIN }}
          TEST_EMAIL_PASSWORD: ${{ secrets.TEST_EMAIL_PASSWORD }}
          TEST_EMAIL_IMAP_SERVER: ${{ secrets.TEST_EMAIL_IMAP_SERVER }}
        run: |
          tox -e testcore

      - name: Build Python distributions
        run: python -m build

      - name: Publish package to PyPI
        if: ${{ github.ref == 'refs/heads/master' }}
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}
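
The deployment job builds the release artifacts with the standard Python build front end before handing them to the PyPI publish action. A hedged local equivalent of just the packaging step (the upload itself stays in CI):

```sh
# Mirror the "Build Python distributions" step
python -m pip install --upgrade pip
python -m pip install build

python -m build    # writes the sdist and wheel into ./dist/ by default
ls dist/
```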

109  .github/workflows/docs.yml (vendored, new file)

@@ -0,0 +1,109 @@
name: Docs

on: [push, pull_request]

jobs:
  build:
    name: Build Docs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: Build docs
        run: |
          tox -e docs

      - name: Slack Notification
        uses: homoluctus/slatify@master
        if: failure()
        with:
          type: ${{ job.status }}
          job_name: '*Docs*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          token: ${{ secrets.SLACK_GITHUB_TOKEN }}

      - name: Preserve Docs
        if: ${{ github.event_name == 'push' }}
        run: |
          tar -czvf docs.tar.gz -C docs/_build html rtdpage

      - name: Save artifact
        if: ${{ github.event_name == 'push' }}
        uses: actions/upload-artifact@v4
        with:
          name: docs
          path: ./docs.tar.gz

  deploy:
    name: Deploy Docs
    needs: build
    runs-on: ubuntu-latest
    env:
      DOCS_REPO: platformio/platformio-docs
      DOCS_DIR: platformio-docs
      LATEST_DOCS_DIR: latest-docs
      RELEASE_BUILD: ${{ startsWith(github.ref, 'refs/tags/v') }}
    if: ${{ github.event_name == 'push' }}
    steps:
      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          name: docs
      - name: Unpack artifact
        run: |
          mkdir ./${{ env.LATEST_DOCS_DIR }}
          tar -xzf ./docs.tar.gz -C ./${{ env.LATEST_DOCS_DIR }}
      - name: Delete Artifact
        uses: geekyeggo/delete-artifact@v5
        with:
          name: docs
      - name: Select Docs type
        id: get-destination-dir
        run: |
          if [[ ${{ env.RELEASE_BUILD }} == true ]]; then
            echo "::set-output name=dst_dir::stable"
          else
            echo "::set-output name=dst_dir::latest"
          fi
      - name: Checkout latest Docs
        continue-on-error: true
        uses: actions/checkout@v4
        with:
          repository: ${{ env.DOCS_REPO }}
          path: ${{ env.DOCS_DIR }}
          ref: gh-pages
      - name: Synchronize Docs
        run: |
          rm -rf ${{ env.DOCS_DIR }}/.git
          rm -rf ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          mkdir -p ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          cp -rf ${{ env.LATEST_DOCS_DIR }}/html/* ${{ env.DOCS_DIR }}/en/${{ steps.get-destination-dir.outputs.dst_dir }}
          if [[ ${{ env.RELEASE_BUILD }} == false ]]; then
            rm -rf ${{ env.DOCS_DIR }}/page
            mkdir -p ${{ env.DOCS_DIR }}/page
            cp -rf ${{ env.LATEST_DOCS_DIR }}/rtdpage/* ${{ env.DOCS_DIR }}/page
          fi
      - name: Validate Docs
        run: |
          if [ -z "$(ls -A ${{ env.DOCS_DIR }})" ]; then
            echo "Docs folder is empty. Aborting!"
            exit 1
          fi
      - name: Deploy to Github Pages
        uses: peaceiris/actions-gh-pages@v4
        with:
          personal_token: ${{ secrets.DEPLOY_GH_DOCS_TOKEN }}
          external_repository: ${{ env.DOCS_REPO }}
          publish_dir: ./${{ env.DOCS_DIR }}
          commit_message: Sync Docs
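
The docs job builds the documentation through tox and then archives docs/_build for the deploy job. A rough local equivalent of the build-and-archive part, assuming the documentation sources are available under docs/ (the workflow fetches them via the recursive submodule checkout):

```sh
# Build the docs and pack them the way the "Preserve Docs" step does
python -m pip install tox
tox -e docs

tar -czvf docs.tar.gz -C docs/_build html rtdpage
```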
.github/workflows/examples.yml (new file)
@@ -0,0 +1,63 @@
name: Examples

on: [push, pull_request]


jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    env:
      PIO_INSTALL_DEVPLATFORM_OWNERNAMES: "platformio"
      PIO_INSTALL_DEVPLATFORM_NAMES: "aceinna_imu,atmelavr,atmelmegaavr,atmelsam,espressif32,espressif8266,nordicnrf52,raspberrypi,ststm32,teensy"

    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox

      - name: Run on Linux
        if: startsWith(matrix.os, 'ubuntu')
        run: |
          # Free space
          sudo apt clean
          # docker rmi $(docker image ls -aq)
          df -h
          tox -e testexamples

      - name: Run on macOS
        if: startsWith(matrix.os, 'macos')
        run: |
          df -h
          tox -e testexamples

      - name: Run on Windows
        if: startsWith(matrix.os, 'windows')
        env:
          PLATFORMIO_CORE_DIR: C:/pio
          PLATFORMIO_WORKSPACE_DIR: C:/pio-workspace/$PROJECT_HASH
        run: |
          tox -e testexamples

      - name: Slack Notification
        uses: homoluctus/slatify@master
        if: failure()
        with:
          type: ${{ job.status }}
          job_name: '*Examples*'
          commit: true
          url: ${{ secrets.SLACK_BUILD_WEBHOOK }}
          token: ${{ secrets.SLACK_GITHUB_TOKEN }}
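The two PIO_INSTALL_DEVPLATFORM_* variables above limit which development platforms the example tests are allowed to install (only platforms owned by "platformio" and only the listed names). Roughly how such a comma-separated allow-list can be consumed on the Python side; the helper below is illustrative and is not the repository's actual implementation:

```python
import os


def allowed_dev_platforms() -> set:
    """Parse the comma-separated allow-list used by the Examples workflow (illustrative)."""
    raw = os.environ.get("PIO_INSTALL_DEVPLATFORM_NAMES", "")
    return {name.strip() for name in raw.split(",") if name.strip()}


# With the workflow's value, "espressif32" passes the filter while an
# unlisted platform such as "nxplpc" does not.
print("espressif32" in allowed_dev_platforms())
```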
.github/workflows/projects.yml (new file)
@@ -0,0 +1,56 @@
name: Projects

on: [push, pull_request]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        project:
          - marlin:
            repository: "MarlinFirmware/Marlin"
            folder: "Marlin"
            config_dir: "Marlin"
            env_name: "mega2560"
          - smartknob:
            repository: "scottbez1/smartknob"
            folder: "smartknob"
            config_dir: "smartknob"
            env_name: "view"
          - espurna:
            repository: "xoseperez/espurna"
            folder: "espurna"
            config_dir: "espurna/code"
            env_name: "nodemcu-lolin"
          - OpenMQTTGateway:
            repository: "1technophile/OpenMQTTGateway"
            folder: "OpenMQTTGateway"
            config_dir: "OpenMQTTGateway"
            env_name: "esp32-m5atom-lite"
        os: [ubuntu-latest, windows-latest, macos-latest]

    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: "recursive"

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: 3.11

      - name: Install PlatformIO
        run: pip install -U .

      - name: Check out ${{ matrix.project.repository }}
        uses: actions/checkout@v4
        with:
          submodules: "recursive"
          repository: ${{ matrix.project.repository }}
          path: ${{ matrix.project.folder }}

      - name: Compile ${{ matrix.project.repository }}
        run: pio run -d ${{ matrix.project.config_dir }} -e ${{ matrix.project.env_name }}
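Each matrix entry above is compiled with the same two PlatformIO CLI flags: -d points at the directory that holds platformio.ini and -e selects one environment. A hedged sketch of reproducing the "Compile" step locally for the Marlin entry (it assumes the project was already cloned next to the script):

```python
import subprocess

# One matrix entry from the workflow above (Marlin on the mega2560 environment).
project = {"config_dir": "Marlin", "env_name": "mega2560"}

subprocess.run(
    ["pio", "run", "-d", project["config_dir"], "-e", project["env_name"]],
    check=True,  # mirror CI behaviour: a failed build fails the script
)
```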
.gitignore
@@ -1,6 +1,6 @@
*.egg-info
*.pyc
.pioenvs
__pycache__
.tox
docs/_build
dist
@@ -1,3 +0,0 @@
[settings]
line_length=79
known_third_party=bottle,click,pytest,requests,SCons,semantic_version,serial
.pylintrc
@@ -1,23 +1,12 @@
[REPORTS]
output-format=colorized

[MESSAGES CONTROL]

# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time. See also the "--disable" option for examples.
#enable=

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
# disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating

disable=locally-disabled,missing-docstring,invalid-name,too-few-public-methods,redefined-variable-type,import-error,similarities,unsupported-membership-test,unsubscriptable-object,ungrouped-imports,cyclic-import,superfluous-parens
disable=
    missing-docstring,
    duplicate-code,
    invalid-name,
    too-few-public-methods,
    consider-using-f-string,
    cyclic-import,
    use-dict-literal
@@ -1,3 +0,0 @@
[style]
blank_line_before_nested_class_or_def = true
allow_multiline_lambdas = true
.travis.yml (deleted)
@@ -1,39 +0,0 @@
language: python

matrix:
  include:
    - os: linux
      sudo: false
      python: 2.7
      env: TOX_ENV=docs
    - os: linux
      sudo: false
      python: 2.7
      env: TOX_ENV=lint
    - os: linux
      sudo: required
      python: 2.7
      env: TOX_ENV=py27
    - os: osx
      language: generic
      env: TOX_ENV=skipexamples

install:
  - git submodule update --init --recursive
  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then curl -fsSL https://bootstrap.pypa.io/get-pip.py | sudo python; fi
  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install "tox==3.0.0"; else pip install -U tox; fi

  # ChipKIT issue: install 32-bit support for GCC PIC32
  - if [[ "$TOX_ENV" == "py27" ]] && [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libc6-i386; fi

script:
  - tox -e $TOX_ENV

notifications:
  email: false

  slack:
    rooms:
      secure: JD6VGfN4+SLU2CwDdiIOr1VgwD+zbYUCE/srwyGuHavnjIkPItkl6T6Bn8Y4VrU6ysbuKotfdV2TAJJ82ivFbY8BvZBc7FBcYp/AGQ4FaCCV5ySv8RDAcQgdE12oaGzMdODiLqsB85f65zOlAFa+htaXyEiRTcotn6Y2hupatrI=
    on_failure: always
    on_success: change
.vscode/settings.json (deleted)
@@ -1,15 +0,0 @@
{
    "python.pythonPath": "${workspaceRoot}/.tox/develop/bin/python",
    "python.formatting.provider": "yapf",
    "files.exclude": {
        "**/*.pyc": true,
        "*.egg-info": true,
        ".cache": true,
        "build": true,
        "dist": true
    },
    "editor.rulers": [79],
    "restructuredtext.builtDocumentationPath": "${workspaceRoot}/docs/_build/html",
    "restructuredtext.confPath": "${workspaceRoot}/docs",
    "restructuredtext.linter.executablePath": "${workspaceRoot}/.tox/docs/bin/restructuredtext-lint"
}
CODE_OF_CONDUCT.md (new file)
@@ -0,0 +1,3 @@
# Code of Conduct

See https://piolabs.com/legal/code-of-conduct.html
@@ -1,21 +1,22 @@
Contributing
------------

To get started, <a href="https://www.clahub.com/agreements/platformio/platformio-core">sign the Contributor License Agreement</a>.
To get started, <a href="https://cla-assistant.io/platformio/platformio-core">sign the Contributor License Agreement</a>.

1. Fork the repository on GitHub.
2. Make a branch off of ``develop``
3. Run ``pip install tox``
4. Go to the root of project where is located ``tox.ini`` and run ``tox -e develop``
1. Fork the repository on GitHub
2. Clone repository `git clone --recursive https://github.com/YourGithubUsername/platformio-core.git`
3. Run `pip install tox`
4. Go to the root of the PlatformIO Core project where `tox.ini` is located (``cd platformio-core``) and run `tox -e py39`.
   You can replace `py39` with your own Python version. For example, `py311` means Python 3.11.
5. Activate current development environment:

   * Windows: ``.tox\develop\Scripts\activate``
   * Bash/ZSH: ``source .tox/develop/bin/activate``
   * Fish: ``source .tox/bin/activate.fish``
   * Windows: `.tox\py39\Scripts\activate`
   * Bash/ZSH: `source .tox/py39/bin/activate`
   * Fish: `source .tox/py39/bin/activate.fish`

6. Make changes to code, documentation, etc.
7. Lint source code ``tox -e lint``
8. Run the tests ``tox -e py27``
9. Build documentation ``tox -e docs`` (creates a directory _build under docs where you can find the html)
7. Lint source code `make before-commit`
8. Run the tests `make test`
9. Build documentation `tox -e docs` (creates a directory _build under docs where you can find the html)
10. Commit changes to your forked repository
11. Submit a Pull Request on GitHub.
11. Submit a Pull Request on GitHub
HISTORY.rst
File diff suppressed because it is too large.
Makefile
@@ -1,18 +1,22 @@

lint:
	pylint --rcfile=./.pylintrc ./tests
	pylint --rcfile=./.pylintrc ./platformio

isort:
	isort -rc ./platformio
	isort -rc ./tests
	isort ./platformio
	isort ./tests

yapf:
	yapf --recursive --in-place platformio/
format:
	black ./platformio
	black ./tests

codespell:
	codespell --skip "./build,./docs/_build" -L "AtLeast,TRE,ans,dout,homestate,ser"

test:
	py.test -v -s -n 3 --dist=loadscope tests --ignore tests/test_examples.py --ignore tests/test_pkgmanifest.py
	pytest --verbose --exitfirst -n 6 --dist=loadscope tests --ignore tests/test_examples.py

before-commit: isort yapf lint test
before-commit: codespell isort format lint

clean-docs:
	rm -rf docs/_build
@@ -23,4 +27,15 @@ clean: clean-docs
	rm -rf .cache
	rm -rf build
	rm -rf htmlcov
	rm -f .coverage
	rm -f .coverage

profile:
	# Usage $ > make PIOARGS="boards" profile
	python -m cProfile -o .tox/.tmp/cprofile.prof -m platformio ${PIOARGS}
	snakeviz .tox/.tmp/cprofile.prof

pack:
	python setup.py sdist

publish:
	python setup.py sdist upload
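The new profile target wraps one CLI invocation in cProfile and then opens the result in snakeviz. A rough Python equivalent that profiles the same `boards` example from the Makefile comment and prints the hottest calls instead of launching a viewer (sketch only):

```python
import cProfile
import pstats

from platformio.__main__ import main

profiler = cProfile.Profile()
profiler.enable()
main(["platformio", "boards"])  # same arguments as `make PIOARGS="boards" profile`
profiler.disable()

pstats.Stats(profiler).sort_stats("cumulative").print_stats(20)
```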
README.rst
@@ -1,130 +1,95 @@
|
||||
PlatformIO
|
||||
==========
|
||||
PlatformIO Core
|
||||
===============
|
||||
|
||||
.. image:: https://travis-ci.org/platformio/platformio-core.svg?branch=develop
|
||||
:target: https://travis-ci.org/platformio/platformio-core
|
||||
:alt: Travis.CI Build Status
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/unnpw0n3c5k14btn/branch/develop?svg=true
|
||||
:target: https://ci.appveyor.com/project/ivankravets/platformio-core
|
||||
:alt: AppVeyor.CI Build Status
|
||||
.. image:: https://github.com/platformio/platformio-core/workflows/Core/badge.svg
|
||||
:target: https://docs.platformio.org/en/latest/core/index.html
|
||||
:alt: CI Build for PlatformIO Core
|
||||
.. image:: https://github.com/platformio/platformio-core/workflows/Docs/badge.svg
|
||||
:target: https://docs.platformio.org?utm_source=github&utm_medium=core
|
||||
:alt: CI Build for Docs
|
||||
.. image:: https://github.com/platformio/platformio-core/workflows/Examples/badge.svg
|
||||
:target: https://github.com/platformio/platformio-examples
|
||||
:alt: CI Build for dev-platform examples
|
||||
.. image:: https://github.com/platformio/platformio-core/workflows/Projects/badge.svg
|
||||
:target: https://docs.platformio.org/en/latest/tutorials/index.html#projects
|
||||
:alt: CI Build for the Community Projects
|
||||
.. image:: https://img.shields.io/pypi/v/platformio.svg
|
||||
:target: https://pypi.python.org/pypi/platformio/
|
||||
:alt: Latest Version
|
||||
.. image:: https://img.shields.io/pypi/l/platformio.svg
|
||||
:target: https://pypi.python.org/pypi/platformio/
|
||||
:alt: License
|
||||
.. image:: https://img.shields.io/PlatformIO/Community.png
|
||||
:alt: Community Forums
|
||||
:target: https://community.platformio.org?utm_source=github&utm_medium=core
|
||||
.. image:: https://img.shields.io/PIO/Plus.png?color=orange
|
||||
:alt: PIO Plus: Professional solutions for an awesome open source PlatformIO ecosystem
|
||||
:target: https://platformio.org/pricing?utm_source=github&utm_medium=core
|
||||
.. image:: https://img.shields.io/badge/PlatformIO-Labs-orange.svg
|
||||
:alt: PlatformIO Labs
|
||||
:target: https://piolabs.com/?utm_source=github&utm_medium=core
|
||||
|
||||
**Quick Links:** `Web <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PIO Plus <https://platformio.org/pricing?utm_source=github&utm_medium=core>`_ |
|
||||
**Quick Links:** `Homepage <https://platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/>`_ |
|
||||
`Registry <https://registry.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`Project Examples <https://github.com/platformio/platformio-examples/>`__ |
|
||||
`Docs <https://docs.platformio.org?utm_source=github&utm_medium=core>`_ |
|
||||
`Donate <https://platformio.org/donate?utm_source=github&utm_medium=core>`_ |
|
||||
`Contact Us <https://platformio.org/contact?utm_source=github&utm_medium=core>`_
|
||||
`Contact Us <https://piolabs.com/?utm_source=github&utm_medium=core>`_
|
||||
|
||||
**Social:** `Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
`LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
|
||||
**Social:** `LinkedIn <https://www.linkedin.com/company/platformio/>`_ |
|
||||
`Twitter <https://twitter.com/PlatformIO_Org>`_ |
|
||||
`Facebook <https://www.facebook.com/platformio>`_ |
|
||||
`Hackaday <https://hackaday.io/project/7980-platformio>`_ |
|
||||
`Bintray <https://bintray.com/platformio>`_ |
|
||||
`Community <https://community.platformio.org?utm_source=github&utm_medium=core>`_
|
||||
`Community Forums <https://community.platformio.org?utm_source=github&utm_medium=core>`_
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/platformio/platformio-web/develop/app/images/platformio-ide-laptop.png
|
||||
:target: https://platformio.org?utm_source=github&utm_medium=core
|
||||
|
||||
`PlatformIO <https://platformio.org?utm_source=github&utm_medium=core>`_ is an open source ecosystem for IoT
|
||||
development. Cross-platform IDE and unified debugger. Remote unit testing and
|
||||
firmware updates.
|
||||
`PlatformIO <https://platformio.org>`_: Your Gateway to Embedded Software Development Excellence.
|
||||
|
||||
Unlock the true potential of embedded software development with
|
||||
PlatformIO's collaborative ecosystem, embracing declarative principles,
|
||||
test-driven methodologies, and modern toolchains for unrivaled success.
|
||||
|
||||
* Open source, maximum permissive Apache 2.0 license
|
||||
* Cross-platform IDE and Unified Debugger
|
||||
* Static Code Analyzer and Remote Unit Testing
|
||||
* Multi-platform and Multi-architecture Build System
|
||||
* Firmware File Explorer and Memory Inspection
|
||||
|
||||
Get Started
|
||||
-----------
|
||||
|
||||
* `What is PlatformIO? <https://docs.platformio.org/en/latest/what-is-platformio.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Open Source
|
||||
-----------
|
||||
|
||||
* `PlatformIO IDE <https://platformio.org/platformio-ide?utm_source=github&utm_medium=core>`_
|
||||
* `PlatformIO Core (CLI) <https://docs.platformio.org/en/latest/core.html?utm_source=github&utm_medium=core>`_
|
||||
* `Library Management <https://docs.platformio.org/page/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`_
|
||||
* `Desktop IDEs Integration <https://docs.platformio.org/page/ide.html?utm_source=github&utm_medium=core>`_
|
||||
* `Continuous Integration <https://docs.platformio.org/page/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Advanced Scripting API <https://docs.platformio.org/page/projectconf/advanced_scripting.html?utm_source=github&utm_medium=core>`_
|
||||
* `Project Examples <https://github.com/platformio/platformio-examples?utm_source=github&utm_medium=core>`__
|
||||
|
||||
PIO Plus
|
||||
--------
|
||||
Solutions
|
||||
---------
|
||||
|
||||
* `PIO Remote <https://docs.platformio.org/page/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unified Debugger <https://docs.platformio.org/page/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `PIO Unit Testing <https://docs.platformio.org/en/latest/plus/unit-testing.html?utm_source=github&utm_medium=core>`_
|
||||
* `Cloud IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core#solution-pio-delivery>`_
|
||||
* `Integration Services <https://platformio.org/pricing?utm_source=github&utm_medium=core#enterprise-features>`_
|
||||
* `Library Management <https://docs.platformio.org/en/latest/librarymanager/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Desktop IDEs Integration <https://docs.platformio.org/en/latest/ide.html?utm_source=github&utm_medium=core>`_
|
||||
* `Continuous Integration <https://docs.platformio.org/en/latest/ci/index.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
**Advanced**
|
||||
|
||||
* `Debugging <https://docs.platformio.org/en/latest/plus/debugging.html?utm_source=github&utm_medium=core>`_
|
||||
* `Unit Testing <https://docs.platformio.org/en/latest/advanced/unit-testing/index.html?utm_source=github&utm_medium=core>`_
|
||||
* `Static Code Analysis <https://docs.platformio.org/en/latest/plus/pio-check.html?utm_source=github&utm_medium=core>`_
|
||||
* `Remote Development <https://docs.platformio.org/en/latest/plus/pio-remote.html?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Registry
|
||||
--------
|
||||
|
||||
* `Libraries <https://platformio.org/lib?utm_source=github&utm_medium=core>`_
|
||||
* `Development Platforms <https://platformio.org/platforms?utm_source=github&utm_medium=core>`_
|
||||
* `Frameworks <https://platformio.org/frameworks?utm_source=github&utm_medium=core>`_
|
||||
* `Embedded Boards <https://platformio.org/boards?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Development Platforms
|
||||
---------------------
|
||||
|
||||
* `Atmel AVR <https://platformio.org/platforms/atmelavr?utm_source=github&utm_medium=core>`_
|
||||
* `Atmel SAM <https://platformio.org/platforms/atmelsam?utm_source=github&utm_medium=core>`_
|
||||
* `Espressif 32 <https://platformio.org/platforms/espressif32?utm_source=github&utm_medium=core>`_
|
||||
* `Espressif 8266 <https://platformio.org/platforms/espressif8266?utm_source=github&utm_medium=core>`_
|
||||
* `Freescale Kinetis <https://platformio.org/platforms/freescalekinetis?utm_source=github&utm_medium=core>`_
|
||||
* `Infineon XMC <https://platformio.org/platforms/infineonxmc?utm_source=github&utm_medium=core>`_
|
||||
* `Intel ARC32 <https://platformio.org/platforms/intel_arc32?utm_source=github&utm_medium=core>`_
|
||||
* `Intel MCS-51 (8051) <https://platformio.org/platforms/intel_mcs51?utm_source=github&utm_medium=core>`_
|
||||
* `Lattice iCE40 <https://platformio.org/platforms/lattice_ice40?utm_source=github&utm_medium=core>`_
|
||||
* `Maxim 32 <https://platformio.org/platforms/maxim32?utm_source=github&utm_medium=core>`_
|
||||
* `Microchip PIC32 <https://platformio.org/platforms/microchippic32?utm_source=github&utm_medium=core>`_
|
||||
* `Nordic nRF51 <https://platformio.org/platforms/nordicnrf51?utm_source=github&utm_medium=core>`_
|
||||
* `Nordic nRF52 <https://platformio.org/platforms/nordicnrf52?utm_source=github&utm_medium=core>`_
|
||||
* `NXP LPC <https://platformio.org/platforms/nxplpc?utm_source=github&utm_medium=core>`_
|
||||
* `RISC-V <https://platformio.org/platforms/riscv?utm_source=github&utm_medium=core>`_
|
||||
* `Samsung ARTIK <https://platformio.org/platforms/samsung_artik?utm_source=github&utm_medium=core>`_
|
||||
* `Silicon Labs EFM32 <https://platformio.org/platforms/siliconlabsefm32?utm_source=github&utm_medium=core>`_
|
||||
* `ST STM32 <https://platformio.org/platforms/ststm32?utm_source=github&utm_medium=core>`_
|
||||
* `Teensy <https://platformio.org/platforms/teensy?utm_source=github&utm_medium=core>`_
|
||||
* `TI MSP430 <https://platformio.org/platforms/timsp430?utm_source=github&utm_medium=core>`_
|
||||
* `TI Tiva <https://platformio.org/platforms/titiva?utm_source=github&utm_medium=core>`_
|
||||
* `WIZNet W7500 <https://platformio.org/platforms/wiznet7500?utm_source=github&utm_medium=core>`_
|
||||
|
||||
Frameworks
|
||||
----------
|
||||
|
||||
* `Arduino <https://platformio.org/frameworks/arduino?utm_source=github&utm_medium=core>`_
|
||||
* `ARTIK SDK <https://platformio.org/frameworks/artik-sdk?utm_source=github&utm_medium=core>`_
|
||||
* `CMSIS <https://platformio.org/frameworks/cmsis?utm_source=github&utm_medium=core>`_
|
||||
* `Energia <https://platformio.org/frameworks/energia?utm_source=github&utm_medium=core>`_
|
||||
* `ESP-IDF <https://platformio.org/frameworks/espidf?utm_source=github&utm_medium=core>`_
|
||||
* `ESP8266 Non-OS SDK <https://platformio.org/frameworks/esp8266-nonos-sdk?utm_source=github&utm_medium=core>`_
|
||||
* `ESP8266 RTOS SDK <https://platformio.org/frameworks/esp8266-rtos-sdk?utm_source=github&utm_medium=core>`_
|
||||
* `libOpenCM3 <https://platformio.org/frameworks/libopencm3?utm_source=github&utm_medium=core>`_
|
||||
* `mbed <https://platformio.org/frameworks/mbed?utm_source=github&utm_medium=core>`_
|
||||
* `Pumbaa <https://platformio.org/frameworks/pumbaa?utm_source=github&utm_medium=core>`_
|
||||
* `Simba <https://platformio.org/frameworks/simba?utm_source=github&utm_medium=core>`_
|
||||
* `SPL <https://platformio.org/frameworks/spl?utm_source=github&utm_medium=core>`_
|
||||
* `STM32Cube <https://platformio.org/frameworks/stm32cube?utm_source=github&utm_medium=core>`_
|
||||
* `Tizen RT <https://platformio.org/frameworks/tizenrt?utm_source=github&utm_medium=core>`_
|
||||
* `WiringPi <https://platformio.org/frameworks/wiringpi?utm_source=github&utm_medium=core>`_
|
||||
* `Libraries <https://registry.platformio.org/search?t=library&utm_source=github&utm_medium=core>`_
|
||||
* `Development Platforms <https://registry.platformio.org/search?t=platform&utm_source=github&utm_medium=core>`_
|
||||
* `Development Tools <https://registry.platformio.org/search?t=tool&utm_source=github&utm_medium=core>`_
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
See `contributing guidelines <https://github.com/platformio/platformio/blob/develop/CONTRIBUTING.md>`_.
|
||||
|
||||
Telemetry / Privacy Policy
|
||||
--------------------------
|
||||
|
||||
Share minimal diagnostics and usage information to help us make PlatformIO better.
|
||||
It is enabled by default. For more information see:
|
||||
|
||||
* `Telemetry Setting <https://docs.platformio.org/en/latest/userguide/cmd_settings.html?utm_source=github&utm_medium=core#enable-telemetry>`_
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
@ -132,3 +97,7 @@ Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
|
||||
The PlatformIO is licensed under the permissive Apache 2.0 license,
|
||||
so you can use it in both commercial and personal projects with confidence.
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct.svg
|
||||
:target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md
|
||||
:alt: SWUbanner
|
SECURITY.md (new file)
@@ -0,0 +1,34 @@
# Security Policy

## Supported Versions

We are committed to ensuring the security and protection of PlatformIO Core.
To this end, we support only the following versions:

| Version | Supported          |
| ------- | ------------------ |
| 6.1.x   | :white_check_mark: |
| < 6.1   | :x:                |

Unsupported versions of the PlatformIO Core may have known vulnerabilities or security issues that could compromise the security of our organization's systems and data.
Therefore, it is important that all developers use only supported versions of the PlatformIO Core.

## Reporting a Vulnerability

We take the security of our systems and data very seriously. We encourage responsible disclosure of any vulnerabilities or security issues that you may find in our systems or applications. If you believe you have discovered a vulnerability, please report it to us immediately.

To report a vulnerability, please send an email to our security team at contact@piolabs.com. Please include as much information as possible, including:

- A description of the vulnerability and how it can be exploited
- Steps to reproduce the vulnerability
- Any additional information that can help us understand and reproduce the vulnerability

Once we receive your report, our security team will acknowledge receipt within 24 hours and will work to validate the reported vulnerability. We will provide periodic updates on the progress of the vulnerability assessment, and will notify you once a fix has been deployed.

If the vulnerability is accepted, we will work to remediate the issue as quickly as possible. We may also provide credit or recognition to the individual who reported the vulnerability, at our discretion.

If the vulnerability is declined, we will provide a justification for our decision and may offer guidance on how to improve the report or how to test the system more effectively.

Please note that we will not take any legal action against individuals who report vulnerabilities in good faith and in accordance with this policy.

Thank you for helping us keep our systems and data secure.
Submodule docs updated: 3a6c69a1ae...70ab7ee27b
Submodule examples updated: 9c16f551d7...0409a90a01
@@ -12,31 +12,34 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

VERSION = (3, 6, 6)
VERSION = (6, 1, "19a2")
__version__ = ".".join([str(s) for s in VERSION])

__title__ = "platformio"
__description__ = (
    "An open source ecosystem for IoT development. "
    "Cross-platform IDE and unified debugger. "
    "Remote unit testing and firmware updates. "
    "Arduino, ARM mbed, Espressif (ESP8266/ESP32), STM32, PIC32, nRF51/nRF52, "
    "FPGA, CMSIS, SPL, AVR, Samsung ARTIK, libOpenCM3")
    "Your Gateway to Embedded Software Development Excellence. "
    "Unlock the true potential of embedded software development "
    "with PlatformIO's collaborative ecosystem, embracing "
    "declarative principles, test-driven methodologies, and "
    "modern toolchains for unrivaled success."
)
__url__ = "https://platformio.org"

__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "contact@piolabs.com"

__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__copyright__ = "Copyright 2014-present PlatformIO Labs"

__apiurl__ = "https://api.platformio.org"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
    "registry.platformio.org",
    "registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"

if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
    msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
           "Minimum supported version is 2.7, please upgrade Python.\n"
           "Python 3 is not yet supported.\n")
    sys.stderr.write(msg % (__version__, sys.version))
    sys.exit(1)
__check_internet_hosts__ = [
    "185.199.110.153", # Github.com
    "88.198.170.159", # platformio.org
    "github.com",
] + __registry_mirror_hosts__
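The new VERSION tuple mixes integers with a string suffix, and the unchanged join above turns it into the published development-release string:

```python
VERSION = (6, 1, "19a2")
__version__ = ".".join([str(s) for s in VERSION])
assert __version__ == "6.1.19a2"
```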
@ -14,92 +14,67 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
from os.path import join
|
||||
from platform import system
|
||||
from traceback import format_exc
|
||||
import traceback
|
||||
|
||||
import click
|
||||
|
||||
from platformio import __version__, exception, maintenance
|
||||
from platformio.util import get_source_dir
|
||||
|
||||
|
||||
class PlatformioCLI(click.MultiCommand): # pylint: disable=R0904
|
||||
|
||||
def list_commands(self, ctx):
|
||||
cmds = []
|
||||
for filename in os.listdir(join(get_source_dir(), "commands")):
|
||||
if filename.startswith("__init__"):
|
||||
continue
|
||||
if filename.endswith(".py"):
|
||||
cmds.append(filename[:-3])
|
||||
cmds.sort()
|
||||
return cmds
|
||||
|
||||
def get_command(self, ctx, cmd_name):
|
||||
mod = None
|
||||
try:
|
||||
mod = __import__("platformio.commands." + cmd_name, None, None,
|
||||
["cli"])
|
||||
except ImportError:
|
||||
try:
|
||||
return self._handle_obsolate_command(cmd_name)
|
||||
except AttributeError:
|
||||
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
|
||||
return mod.cli
|
||||
|
||||
@staticmethod
|
||||
def _handle_obsolate_command(name):
|
||||
if name == "platforms":
|
||||
from platformio.commands import platform
|
||||
return platform.cli
|
||||
elif name == "serialports":
|
||||
from platformio.commands import device
|
||||
return device.cli
|
||||
raise AttributeError()
|
||||
from platformio.cli import PlatformioCLI
|
||||
from platformio.compat import IS_CYGWIN, ensure_python3
|
||||
|
||||
|
||||
@click.command(
|
||||
cls=PlatformioCLI,
|
||||
context_settings=dict(help_option_names=["-h", "--help"]))
|
||||
@click.version_option(__version__, prog_name="PlatformIO")
|
||||
@click.option(
|
||||
"--force",
|
||||
"-f",
|
||||
is_flag=True,
|
||||
help="Force to accept any confirmation prompts.")
|
||||
@click.option("--caller", "-c", help="Caller ID (service).")
|
||||
cls=PlatformioCLI, context_settings=dict(help_option_names=["-h", "--help"])
|
||||
)
|
||||
@click.version_option(__version__, prog_name="PlatformIO Core")
|
||||
@click.option("--force", "-f", is_flag=True, help="DEPRECATED", hidden=True)
|
||||
@click.option("--caller", "-c", help="Caller ID (service)")
|
||||
@click.option("--no-ansi", is_flag=True, help="Do not print ANSI control characters")
|
||||
@click.pass_context
|
||||
def cli(ctx, force, caller):
|
||||
maintenance.on_platformio_start(ctx, force, caller)
|
||||
def cli(ctx, force, caller, no_ansi): # pylint: disable=unused-argument
|
||||
try:
|
||||
if (
|
||||
no_ansi
|
||||
or str(
|
||||
os.getenv("PLATFORMIO_NO_ANSI", os.getenv("PLATFORMIO_DISABLE_COLOR"))
|
||||
).lower()
|
||||
== "true"
|
||||
):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: False
|
||||
elif (
|
||||
str(
|
||||
os.getenv("PLATFORMIO_FORCE_ANSI", os.getenv("PLATFORMIO_FORCE_COLOR"))
|
||||
).lower()
|
||||
== "true"
|
||||
):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
maintenance.on_cmd_start(ctx, caller)
|
||||
|
||||
|
||||
@cli.resultcallback()
|
||||
@cli.result_callback()
|
||||
@click.pass_context
|
||||
def process_result(ctx, result, force, caller): # pylint: disable=W0613
|
||||
maintenance.on_platformio_end(ctx, result)
|
||||
def process_result(*_, **__):
|
||||
maintenance.on_cmd_end()
|
||||
|
||||
|
||||
def configure():
|
||||
if "cygwin" in system().lower():
|
||||
if IS_CYGWIN:
|
||||
raise exception.CygwinEnvDetected()
|
||||
|
||||
# https://urllib3.readthedocs.org
|
||||
# /en/latest/security.html#insecureplatformwarning
|
||||
try:
|
||||
import urllib3
|
||||
import urllib3 # pylint: disable=import-outside-toplevel
|
||||
|
||||
urllib3.disable_warnings()
|
||||
except (AttributeError, ImportError):
|
||||
pass
|
||||
|
||||
# handle PLATFORMIO_FORCE_COLOR
|
||||
if str(os.getenv("PLATFORMIO_FORCE_COLOR", "")).lower() == "true":
|
||||
try:
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
# Handle IOError issue with VSCode's Terminal (Windows)
|
||||
click_echo_origin = [click.echo, click.secho]
|
||||
|
||||
@ -108,34 +83,45 @@ def configure():
|
||||
click_echo_origin[origin](*args, **kwargs)
|
||||
except IOError:
|
||||
(sys.stderr.write if kwargs.get("err") else sys.stdout.write)(
|
||||
"%s\n" % (args[0] if args else ""))
|
||||
"%s\n" % (args[0] if args else "")
|
||||
)
|
||||
|
||||
click.echo = lambda *args, **kwargs: _safe_echo(0, *args, **kwargs)
|
||||
click.secho = lambda *args, **kwargs: _safe_echo(1, *args, **kwargs)
|
||||
|
||||
|
||||
def main():
|
||||
def main(argv=None):
|
||||
exit_code = 0
|
||||
prev_sys_argv = sys.argv[:]
|
||||
if argv:
|
||||
assert isinstance(argv, list)
|
||||
sys.argv = argv
|
||||
|
||||
try:
|
||||
ensure_python3(raise_exception=True)
|
||||
configure()
|
||||
cli(None, None, None)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
if not isinstance(e, exception.ReturnErrorCode):
|
||||
maintenance.on_platformio_exception(e)
|
||||
error_str = "Error: "
|
||||
if isinstance(e, exception.PlatformioException):
|
||||
error_str += str(e)
|
||||
cli() # pylint: disable=no-value-for-parameter
|
||||
except SystemExit as exc:
|
||||
if exc.code and str(exc.code).isdigit():
|
||||
exit_code = int(exc.code)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
if not isinstance(exc, exception.ReturnErrorCode):
|
||||
maintenance.on_platformio_exception(exc)
|
||||
error_str = f"{exc.__class__.__name__}: "
|
||||
if isinstance(exc, exception.PlatformioException):
|
||||
error_str += str(exc)
|
||||
else:
|
||||
error_str += format_exc()
|
||||
error_str += traceback.format_exc()
|
||||
error_str += """
|
||||
============================================================
|
||||
|
||||
An unexpected error occurred. Further steps:
|
||||
|
||||
* Verify that you have the latest version of PlatformIO using
|
||||
`pip install -U platformio` command
|
||||
`python -m pip install -U platformio` command
|
||||
|
||||
* Try to find answer in FAQ Troubleshooting section
|
||||
https://docs.platformio.org/page/faq.html
|
||||
https://docs.platformio.org/page/faq/index.html
|
||||
|
||||
* Report this problem to the developers
|
||||
https://github.com/platformio/platformio-core/issues
|
||||
@ -143,13 +129,15 @@ An unexpected error occurred. Further steps:
|
||||
============================================================
|
||||
"""
|
||||
click.secho(error_str, fg="red", err=True)
|
||||
return int(str(e)) if str(e).isdigit() else 1
|
||||
return 0
|
||||
exit_code = int(str(exc)) if str(exc).isdigit() else 1
|
||||
|
||||
maintenance.on_platformio_exit()
|
||||
sys.argv = prev_sys_argv
|
||||
return exit_code
|
||||
|
||||
|
||||
def debug_gdb_main():
|
||||
sys.argv = [sys.argv[0], "debug", "--interface", "gdb"] + sys.argv[1:]
|
||||
return main()
|
||||
return main([sys.argv[0], "debug", "--interface", "gdb"] + sys.argv[1:])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
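main() now accepts an explicit argv list, temporarily swaps it into sys.argv, and returns a numeric exit code instead of letting SystemExit escape, which makes it callable in-process. A usage sketch (the chosen command is just an example):

```python
from platformio.__main__ import main

exit_code = main(["platformio", "--help"])  # prints the CLI help
print("pio exited with", exit_code)
```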
platformio/account/cli.py (new file)
@@ -0,0 +1,44 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.commands.destroy import account_destroy_cmd
from platformio.account.commands.forgot import account_forgot_cmd
from platformio.account.commands.login import account_login_cmd
from platformio.account.commands.logout import account_logout_cmd
from platformio.account.commands.password import account_password_cmd
from platformio.account.commands.register import account_register_cmd
from platformio.account.commands.show import account_show_cmd
from platformio.account.commands.token import account_token_cmd
from platformio.account.commands.update import account_update_cmd


@click.group(
    "account",
    commands=[
        account_destroy_cmd,
        account_forgot_cmd,
        account_login_cmd,
        account_logout_cmd,
        account_password_cmd,
        account_register_cmd,
        account_show_cmd,
        account_token_cmd,
        account_update_cmd,
    ],
    short_help="Manage PlatformIO account",
)
def cli():
    pass
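The account group simply wires the nine sub-commands together; asking click for its help text is an easy way to see them registered. A sketch using click's built-in test runner:

```python
from click.testing import CliRunner

from platformio.account.cli import cli as account_cli

# Lists destroy, forgot, login, logout, password, register, show, token, update.
result = CliRunner().invoke(account_cli, ["--help"])
print(result.output)
```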
platformio/account/client.py (new file)
@@ -0,0 +1,352 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
from platformio import __accounts_api__, app
|
||||
from platformio.exception import PlatformioException, UserSideException
|
||||
from platformio.http import HTTPClient, HTTPClientError
|
||||
|
||||
|
||||
class AccountError(PlatformioException):
|
||||
MESSAGE = "{0}"
|
||||
|
||||
|
||||
class AccountNotAuthorized(AccountError, UserSideException):
|
||||
MESSAGE = "You are not authorized! Please log in to PlatformIO Account."
|
||||
|
||||
|
||||
class AccountAlreadyAuthorized(AccountError, UserSideException):
|
||||
MESSAGE = "You are already authorized with {0} account."
|
||||
|
||||
|
||||
class AccountClient(HTTPClient): # pylint:disable=too-many-public-methods
|
||||
SUMMARY_CACHE_TTL = 60 * 60 * 24 * 7
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(__accounts_api__)
|
||||
|
||||
@staticmethod
|
||||
def get_refresh_token():
|
||||
try:
|
||||
return app.get_state_item("account").get("auth").get("refresh_token")
|
||||
except Exception as exc:
|
||||
raise AccountNotAuthorized() from exc
|
||||
|
||||
@staticmethod
|
||||
def delete_local_session():
|
||||
app.delete_state_item("account")
|
||||
|
||||
@staticmethod
|
||||
def delete_local_state(key):
|
||||
account = app.get_state_item("account")
|
||||
if not account or key not in account:
|
||||
return
|
||||
del account[key]
|
||||
app.set_state_item("account", account)
|
||||
|
||||
def fetch_json_data(self, *args, **kwargs):
|
||||
try:
|
||||
return super().fetch_json_data(*args, **kwargs)
|
||||
except HTTPClientError as exc:
|
||||
raise AccountError(exc) from exc
|
||||
|
||||
def fetch_authentication_token(self):
|
||||
if os.environ.get("PLATFORMIO_AUTH_TOKEN"):
|
||||
return os.environ.get("PLATFORMIO_AUTH_TOKEN")
|
||||
auth = app.get_state_item("account", {}).get("auth", {})
|
||||
if auth.get("access_token") and auth.get("access_token_expire"):
|
||||
if auth.get("access_token_expire") > time.time():
|
||||
return auth.get("access_token")
|
||||
if auth.get("refresh_token"):
|
||||
try:
|
||||
data = self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/login",
|
||||
headers={
|
||||
"Authorization": "Bearer %s" % auth.get("refresh_token")
|
||||
},
|
||||
)
|
||||
app.set_state_item("account", data)
|
||||
return data.get("auth").get("access_token")
|
||||
except AccountError:
|
||||
self.delete_local_session()
|
||||
raise AccountNotAuthorized()
|
||||
|
||||
def login(self, username, password):
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise AccountAlreadyAuthorized(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
data = self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/login",
|
||||
data={"username": username, "password": password},
|
||||
)
|
||||
app.set_state_item("account", data)
|
||||
return data
|
||||
|
||||
def login_with_code(self, client_id, code, redirect_uri):
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise AccountAlreadyAuthorized(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
result = self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/login/code",
|
||||
data={"client_id": client_id, "code": code, "redirect_uri": redirect_uri},
|
||||
)
|
||||
app.set_state_item("account", result)
|
||||
return result
|
||||
|
||||
def logout(self):
|
||||
refresh_token = self.get_refresh_token()
|
||||
self.delete_local_session()
|
||||
try:
|
||||
self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/logout",
|
||||
data={"refresh_token": refresh_token},
|
||||
)
|
||||
except AccountError:
|
||||
pass
|
||||
return True
|
||||
|
||||
def change_password(self, old_password, new_password):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/password",
|
||||
data={"old_password": old_password, "new_password": new_password},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def registration(
|
||||
self, username, email, password, firstname, lastname
|
||||
): # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
try:
|
||||
self.fetch_authentication_token()
|
||||
except: # pylint:disable=bare-except
|
||||
pass
|
||||
else:
|
||||
raise AccountAlreadyAuthorized(
|
||||
app.get_state_item("account", {}).get("email", "")
|
||||
)
|
||||
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/registration",
|
||||
data={
|
||||
"username": username,
|
||||
"email": email,
|
||||
"password": password,
|
||||
"firstname": firstname,
|
||||
"lastname": lastname,
|
||||
},
|
||||
)
|
||||
|
||||
def auth_token(self, password, regenerate):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/token",
|
||||
data={"password": password, "regenerate": 1 if regenerate else 0},
|
||||
x_with_authorization=True,
|
||||
).get("auth_token")
|
||||
|
||||
def forgot_password(self, username):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/forgot",
|
||||
data={"username": username},
|
||||
)
|
||||
|
||||
def get_profile(self):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/profile",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_profile(self, profile, current_password):
|
||||
profile["current_password"] = current_password
|
||||
self.delete_local_state("summary")
|
||||
response = self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/profile",
|
||||
data=profile,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
return response
|
||||
|
||||
def get_account_info(self, offline=False):
|
||||
account = app.get_state_item("account") or {}
|
||||
if (
|
||||
account.get("summary")
|
||||
and account["summary"].get("expire_at", 0) > time.time()
|
||||
):
|
||||
return account["summary"]
|
||||
if offline and account.get("email"):
|
||||
return {
|
||||
"profile": {
|
||||
"email": account.get("email"),
|
||||
"username": account.get("username"),
|
||||
}
|
||||
}
|
||||
result = self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/summary",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
account["summary"] = dict(
|
||||
profile=result.get("profile"),
|
||||
packages=result.get("packages"),
|
||||
subscriptions=result.get("subscriptions"),
|
||||
user_id=result.get("user_id"),
|
||||
expire_at=int(time.time()) + self.SUMMARY_CACHE_TTL,
|
||||
)
|
||||
app.set_state_item("account", account)
|
||||
return result
|
||||
|
||||
def get_logged_username(self):
|
||||
return self.get_account_info(offline=True).get("profile").get("username")
|
||||
|
||||
def destroy_account(self):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/account",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def create_org(self, orgname, email, displayname):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs",
|
||||
data={"orgname": orgname, "email": email, "displayname": displayname},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def get_org(self, orgname):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_orgs(self):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs",
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_org(self, orgname, data):
|
||||
return self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
data={k: v for k, v in data.items() if v},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def destroy_org(self, orgname):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def add_org_owner(self, orgname, username):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
data={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_org_owners(self, orgname):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def remove_org_owner(self, orgname, username):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/owners" % orgname,
|
||||
params={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def create_team(self, orgname, teamname, description):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/teams" % orgname,
|
||||
data={"name": teamname, "description": description},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def destroy_team(self, orgname, teamname):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def get_team(self, orgname, teamname):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def list_teams(self, orgname):
|
||||
return self.fetch_json_data(
|
||||
"get",
|
||||
"/v1/orgs/%s/teams" % orgname,
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def update_team(self, orgname, teamname, data):
|
||||
return self.fetch_json_data(
|
||||
"put",
|
||||
"/v1/orgs/%s/teams/%s" % (orgname, teamname),
|
||||
data={k: v for k, v in data.items() if v},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def add_team_member(self, orgname, teamname, username):
|
||||
return self.fetch_json_data(
|
||||
"post",
|
||||
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
|
||||
data={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
||||
|
||||
def remove_team_member(self, orgname, teamname, username):
|
||||
return self.fetch_json_data(
|
||||
"delete",
|
||||
"/v1/orgs/%s/teams/%s/members" % (orgname, teamname),
|
||||
params={"username": username},
|
||||
x_with_authorization=True,
|
||||
)
|
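AccountClient keeps the session in the local application state and refreshes the access token on demand, so callers only authenticate when no usable session exists. A hedged usage sketch built from the methods above (credentials are placeholders and the calls perform real HTTP requests against the accounts API):

```python
from platformio.account.client import AccountClient, AccountError

client = AccountClient()
try:
    # Reuses the cached session or PLATFORMIO_AUTH_TOKEN if present.
    info = client.get_account_info(offline=True)
    print("Logged in as", info["profile"]["username"])
except AccountError:
    client.login("example-user", "example-password")  # placeholder credentials
    print("Logged in as", client.get_logged_username())
```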
platformio/account/commands/destroy.py (new file)
@@ -0,0 +1,37 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient, AccountNotAuthorized


@click.command("destroy", short_help="Destroy account")
def account_destroy_cmd():
    client = AccountClient()
    click.confirm(
        "Are you sure you want to delete the %s user account?\n"
        "Warning! All linked data will be permanently removed and can not be restored."
        % client.get_logged_username(),
        abort=True,
    )
    client.destroy_account()
    try:
        client.logout()
    except AccountNotAuthorized:
        pass
    click.secho(
        "User account has been destroyed.",
        fg="green",
    )
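The confirmation prompt above relies on click.confirm(..., abort=True), which raises click.Abort on a negative answer so the destructive API call below it is never reached. A tiny standalone illustration of that behaviour:

```python
import click

try:
    click.confirm("Are you sure you want to delete this account?", abort=True)
    print("confirmed, the destructive call would run here")
except click.Abort:
    print("aborted, nothing was deleted")
```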
@@ -12,14 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.commands.update import cli as cmd_update
import click

from platformio.account.client import AccountClient


def test_update(clirunner, validate_cliresult):
    matches = ("Platform Manager", "Up-to-date", "Library Manager")
    result = clirunner.invoke(cmd_update, ["--only-check"])
    validate_cliresult(result)
    assert all([m in result.output for m in matches])
    result = clirunner.invoke(cmd_update)
    validate_cliresult(result)
    assert all([m in result.output for m in matches])
@click.command("forgot", short_help="Forgot password")
@click.option("--username", prompt="Username or email")
def account_forgot_cmd(username):
    client = AccountClient()
    client.forgot_password(username)
    click.secho(
        "If this account is registered, we will send the "
        "further instructions to your email.",
        fg="green",
    )
platformio/account/commands/login.py (new file)
@@ -0,0 +1,26 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("login", short_help="Log in to PlatformIO Account")
@click.option("-u", "--username", prompt="Username or email")
@click.option("-p", "--password", prompt=True, hide_input=True)
def account_login_cmd(username, password):
    client = AccountClient()
    client.login(username, password)
    click.secho("Successfully logged in!", fg="green")
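Because both options fall back to interactive prompts, the command can be driven non-interactively by feeding stdin, which is how click-based tests usually exercise it. A sketch with click's test runner (against the real backend this would attempt an actual login, so the credentials are placeholders):

```python
from click.testing import CliRunner

from platformio.account.commands.login import account_login_cmd

runner = CliRunner()
result = runner.invoke(account_login_cmd, input="example-user\nexample-password\n")
print(result.output)  # ends with "Successfully logged in!" on success
```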
platformio/account/commands/logout.py (new file)
@@ -0,0 +1,24 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("logout", short_help="Log out of PlatformIO Account")
def account_logout_cmd():
    client = AccountClient()
    client.logout()
    click.secho("Successfully logged out!", fg="green")
platformio/account/commands/password.py (new file)
@@ -0,0 +1,26 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient


@click.command("password", short_help="Change password")
@click.option("--old-password", prompt=True, hide_input=True)
@click.option("--new-password", prompt=True, hide_input=True, confirmation_prompt=True)
def account_password_cmd(old_password, new_password):
    client = AccountClient()
    client.change_password(old_password, new_password)
    click.secho("Password successfully changed!", fg="green")
platformio/account/commands/register.py (new file)
@@ -0,0 +1,52 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import click

from platformio.account.client import AccountClient
from platformio.account.validate import (
    validate_email,
    validate_password,
    validate_username,
)


@click.command("register", short_help="Create new PlatformIO Account")
@click.option(
    "-u",
    "--username",
    prompt=True,
    callback=lambda _, __, value: validate_username(value),
)
@click.option(
    "-e", "--email", prompt=True, callback=lambda _, __, value: validate_email(value)
)
@click.option(
    "-p",
    "--password",
    prompt=True,
    hide_input=True,
    confirmation_prompt=True,
    callback=lambda _, __, value: validate_password(value),
)
@click.option("--firstname", prompt=True)
@click.option("--lastname", prompt=True)
def account_register_cmd(username, email, password, firstname, lastname):
    client = AccountClient()
    client.registration(username, email, password, firstname, lastname)
    click.secho(
        "An account has been successfully created. "
        "Please check your mail to activate your account and verify your email address.",
        fg="green",
    )
116
platformio/account/commands/show.py
Normal file
116
platformio/account/commands/show.py
Normal file
@ -0,0 +1,116 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import util
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("show", short_help="PlatformIO Account information")
|
||||
@click.option("--offline", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_show_cmd(offline, json_output):
|
||||
client = AccountClient()
|
||||
info = client.get_account_info(offline)
|
||||
if json_output:
|
||||
click.echo(json.dumps(info))
|
||||
return
|
||||
click.echo()
|
||||
if info.get("profile"):
|
||||
print_profile(info["profile"])
|
||||
if info.get("packages"):
|
||||
print_packages(info["packages"])
|
||||
if info.get("subscriptions"):
|
||||
print_subscriptions(info["subscriptions"])
|
||||
click.echo()
|
||||
|
||||
|
||||
def print_profile(profile):
|
||||
click.secho("Profile", fg="cyan", bold=True)
|
||||
click.echo("=" * len("Profile"))
|
||||
data = []
|
||||
if profile.get("username"):
|
||||
data.append(("Username:", profile["username"]))
|
||||
if profile.get("email"):
|
||||
data.append(("Email:", profile["email"]))
|
||||
if profile.get("firstname"):
|
||||
data.append(("First name:", profile["firstname"]))
|
||||
if profile.get("lastname"):
|
||||
data.append(("Last name:", profile["lastname"]))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
|
||||
def print_packages(packages):
|
||||
click.echo()
|
||||
click.secho("Packages", fg="cyan")
|
||||
click.echo("=" * len("Packages"))
|
||||
for package in packages:
|
||||
click.echo()
|
||||
click.secho(package.get("name"), bold=True)
|
||||
click.echo("-" * len(package.get("name")))
|
||||
if package.get("description"):
|
||||
click.echo(package.get("description"))
|
||||
data = []
|
||||
expire = "-"
|
||||
if "subscription" in package:
|
||||
expire = util.parse_datetime(
|
||||
package["subscription"].get("end_at")
|
||||
or package["subscription"].get("next_bill_at")
|
||||
).strftime("%Y-%m-%d")
|
||||
data.append(("Expire:", expire))
|
||||
services = []
|
||||
for key in package:
|
||||
if not key.startswith("service."):
|
||||
continue
|
||||
if isinstance(package[key], dict):
|
||||
services.append(package[key].get("title"))
|
||||
else:
|
||||
services.append(package[key])
|
||||
if services:
|
||||
data.append(("Services:", ", ".join(services)))
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
||||
|
||||
|
||||
def print_subscriptions(subscriptions):
|
||||
click.echo()
|
||||
click.secho("Subscriptions", fg="cyan")
|
||||
click.echo("=" * len("Subscriptions"))
|
||||
for subscription in subscriptions:
|
||||
click.echo()
|
||||
click.secho(subscription.get("product_name"), bold=True)
|
||||
click.echo("-" * len(subscription.get("product_name")))
|
||||
data = [("State:", subscription.get("status"))]
|
||||
begin_at = util.parse_datetime(subscription.get("begin_at")).strftime("%c")
|
||||
data.append(("Start date:", begin_at or "-"))
|
||||
end_at = subscription.get("end_at")
|
||||
if end_at:
|
||||
end_at = util.parse_datetime(subscription.get("end_at")).strftime("%c")
|
||||
data.append(("End date:", end_at or "-"))
|
||||
next_bill_at = subscription.get("next_bill_at")
|
||||
if next_bill_at:
|
||||
next_bill_at = util.parse_datetime(
|
||||
subscription.get("next_bill_at")
|
||||
).strftime("%c")
|
||||
data.append(("Next payment:", next_bill_at or "-"))
|
||||
data.append(
|
||||
("Edit:", click.style(subscription.get("update_url"), fg="blue") or "-")
|
||||
)
|
||||
data.append(
|
||||
("Cancel:", click.style(subscription.get("cancel_url"), fg="blue") or "-")
|
||||
)
|
||||
click.echo(tabulate(data, tablefmt="plain"))
|
32
platformio/account/commands/token.py
Normal file
32
platformio/account/commands/token.py
Normal file
@ -0,0 +1,32 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient
|
||||
|
||||
|
||||
@click.command("token", short_help="Get or regenerate Authentication Token")
|
||||
@click.option("-p", "--password", prompt=True, hide_input=True)
|
||||
@click.option("--regenerate", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_token_cmd(password, regenerate, json_output):
|
||||
client = AccountClient()
|
||||
auth_token = client.auth_token(password, regenerate)
|
||||
if json_output:
|
||||
click.echo(json.dumps({"status": "success", "result": auth_token}))
|
||||
return
|
||||
click.secho("Personal Authentication Token: %s" % auth_token, fg="green")
|
59
platformio/account/commands/update.py
Normal file
59
platformio/account/commands/update.py
Normal file
@ -0,0 +1,59 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import click
|
||||
|
||||
from platformio.account.client import AccountClient, AccountNotAuthorized
|
||||
from platformio.account.validate import validate_email, validate_username
|
||||
|
||||
|
||||
@click.command("update", short_help="Update profile information")
|
||||
@click.option("--current-password", prompt=True, hide_input=True)
|
||||
@click.option("--username")
|
||||
@click.option("--email")
|
||||
@click.option("--firstname")
|
||||
@click.option("--lastname")
|
||||
def account_update_cmd(current_password, **kwargs):
|
||||
client = AccountClient()
|
||||
profile = client.get_profile()
|
||||
new_profile = profile.copy()
|
||||
if not any(kwargs.values()):
|
||||
for field in profile:
|
||||
new_profile[field] = click.prompt(
|
||||
field.replace("_", " ").capitalize(), default=profile[field]
|
||||
)
|
||||
if field == "email":
|
||||
validate_email(new_profile[field])
|
||||
if field == "username":
|
||||
validate_username(new_profile[field])
|
||||
else:
|
||||
new_profile.update({key: value for key, value in kwargs.items() if value})
|
||||
client.update_profile(new_profile, current_password)
|
||||
click.secho("Profile successfully updated!", fg="green")
|
||||
username_changed = new_profile["username"] != profile["username"]
|
||||
email_changed = new_profile["email"] != profile["email"]
|
||||
if not username_changed and not email_changed:
|
||||
return None
|
||||
try:
|
||||
client.logout()
|
||||
except AccountNotAuthorized:
|
||||
pass
|
||||
if email_changed:
|
||||
click.secho(
|
||||
"Please check your mail to verify your new email address and re-login. ",
|
||||
fg="yellow",
|
||||
)
|
||||
return None
|
||||
click.secho("Please re-login.", fg="yellow")
|
||||
return None
|
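Because the rendering helpers in show.py take plain dictionaries, they can be exercised without an authenticated AccountClient. A quick sketch, illustrative only and not part of this changeset; the sample data below is invented and only mimics the expected shape:

# Illustrative only: render sample data with the helpers defined in show.py.
from platformio.account.commands.show import print_packages, print_profile

print_profile({"username": "ivan", "email": "ivan@example.com"})
print_packages(
    [
        {
            "name": "pio-example-package",
            "description": "Invented package entry for demonstration",
            # No "subscription" key, so the "Expire:" column stays "-".
            "service.example": "Example Service",
        }
    ]
)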
platformio/account/org/__init__.py  (new file)  @@ -0,0 +1,13 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

platformio/account/org/cli.py  (new file)  @@ -0,0 +1,38 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.org.commands.add import org_add_cmd
from platformio.account.org.commands.create import org_create_cmd
from platformio.account.org.commands.destroy import org_destroy_cmd
from platformio.account.org.commands.list import org_list_cmd
from platformio.account.org.commands.remove import org_remove_cmd
from platformio.account.org.commands.update import org_update_cmd


@click.group(
    "account",
    commands=[
        org_add_cmd,
        org_create_cmd,
        org_destroy_cmd,
        org_list_cmd,
        org_remove_cmd,
        org_update_cmd,
    ],
    short_help="Manage organizations",
)
def cli():
    pass
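The `cli` group above only aggregates subcommands; how platformio-core mounts it under the top-level command tree is not shown in this compare. A hypothetical wiring sketch, not taken from this changeset, with an invented parent group named `pio`:

# Hypothetical sketch: a click.Group like the one above can be mounted under
# any parent group; the registered name is what end users type on the CLI.
import click

from platformio.account.org.cli import cli as org_cli


@click.group()
def pio():
    pass


pio.add_command(org_cli, name="org")  # exposes `pio org list`, `pio org create`, ...

if __name__ == "__main__":
    pio()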
platformio/account/org/commands/__init__.py  (new file)  @@ -0,0 +1,13 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

platformio/account/org/commands/add.py  (new file)  @@ -0,0 +1,34 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient


@click.command("add", short_help="Add a new owner to organization")
@click.argument(
    "orgname",
)
@click.argument(
    "username",
)
def org_add_cmd(orgname, username):
    client = AccountClient()
    client.add_org_owner(orgname, username)
    return click.secho(
        "The new owner `%s` has been successfully added to the `%s` organization."
        % (username, orgname),
        fg="green",
    )

platformio/account/org/commands/create.py  (new file)  @@ -0,0 +1,38 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_email, validate_orgname


@click.command("create", short_help="Create a new organization")
@click.argument(
    "orgname",
    callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
    "--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option(
    "--displayname",
)
def org_create_cmd(orgname, email, displayname):
    client = AccountClient()
    client.create_org(orgname, email, displayname)
    return click.secho(
        "The organization `%s` has been successfully created." % orgname,
        fg="green",
    )

platformio/account/org/commands/destroy.py  (new file)  @@ -0,0 +1,34 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient


@click.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def org_destroy_cmd(orgname):
    client = AccountClient()
    click.confirm(
        "Are you sure you want to delete the `%s` organization account?\n"
        "Warning! All linked data will be permanently removed and can not be restored."
        % orgname,
        abort=True,
    )
    client.destroy_org(orgname)
    return click.secho(
        "Organization `%s` has been destroyed." % orgname,
        fg="green",
    )

platformio/account/org/commands/list.py  (new file)  @@ -0,0 +1,48 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import json

import click
from tabulate import tabulate

from platformio.account.client import AccountClient


@click.command("list", short_help="List organizations and their members")
@click.option("--json-output", is_flag=True)
def org_list_cmd(json_output):
    client = AccountClient()
    orgs = client.list_orgs()
    if json_output:
        return click.echo(json.dumps(orgs))
    if not orgs:
        return click.echo("You do not have any organization")
    for org in orgs:
        click.echo()
        click.secho(org.get("orgname"), fg="cyan")
        click.echo("-" * len(org.get("orgname")))
        data = []
        if org.get("displayname"):
            data.append(("Display Name:", org.get("displayname")))
        if org.get("email"):
            data.append(("Email:", org.get("email")))
        data.append(
            (
                "Owners:",
                ", ".join((owner.get("username") for owner in org.get("owners"))),
            )
        )
        click.echo(tabulate(data, tablefmt="plain"))
    return click.echo()

platformio/account/org/commands/remove.py  (new file)  @@ -0,0 +1,34 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient


@click.command("remove", short_help="Remove an owner from organization")
@click.argument(
    "orgname",
)
@click.argument(
    "username",
)
def org_remove_cmd(orgname, username):
    client = AccountClient()
    client.remove_org_owner(orgname, username)
    return click.secho(
        "The `%s` owner has been successfully removed from the `%s` organization."
        % (username, orgname),
        fg="green",
    )

platformio/account/org/commands/update.py  (new file)  @@ -0,0 +1,50 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_email, validate_orgname


@click.command("update", short_help="Update organization")
@click.argument("cur_orgname")
@click.option(
    "--orgname",
    callback=lambda _, __, value: validate_orgname(value) if value else value,
    help="A new orgname",
)
@click.option(
    "--email",
    callback=lambda _, __, value: validate_email(value) if value else value,
)
@click.option("--displayname")
def org_update_cmd(cur_orgname, **kwargs):
    client = AccountClient()
    org = client.get_org(cur_orgname)
    new_org = {
        key: value if value is not None else org[key] for key, value in kwargs.items()
    }
    if not any(kwargs.values()):
        for key in kwargs:
            new_org[key] = click.prompt(key.capitalize(), default=org[key])
            if key == "email":
                validate_email(new_org[key])
            if key == "orgname":
                validate_orgname(new_org[key])
    client.update_org(cur_orgname, new_org)
    return click.secho(
        "The organization `%s` has been successfully updated." % cur_orgname,
        fg="green",
    )

platformio/account/team/__init__.py  (new file)  @@ -0,0 +1,13 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

platformio/account/team/cli.py  (new file)  @@ -0,0 +1,38 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.team.commands.add import team_add_cmd
from platformio.account.team.commands.create import team_create_cmd
from platformio.account.team.commands.destroy import team_destroy_cmd
from platformio.account.team.commands.list import team_list_cmd
from platformio.account.team.commands.remove import team_remove_cmd
from platformio.account.team.commands.update import team_update_cmd


@click.group(
    "team",
    commands=[
        team_add_cmd,
        team_create_cmd,
        team_destroy_cmd,
        team_list_cmd,
        team_remove_cmd,
        team_update_cmd,
    ],
    short_help="Manage organization teams",
)
def cli():
    pass

platformio/account/team/commands/__init__.py  (new file)  @@ -0,0 +1,13 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

platformio/account/team/commands/add.py  (new file)  @@ -0,0 +1,38 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname


@click.command("add", short_help="Add a new member to team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.argument(
    "username",
)
def team_add_cmd(orgname_teamname, username):
    orgname, teamname = orgname_teamname.split(":", 1)
    client = AccountClient()
    client.add_team_member(orgname, teamname, username)
    return click.secho(
        "The new member %s has been successfully added to the %s team."
        % (username, teamname),
        fg="green",
    )

platformio/account/team/commands/create.py  (new file)  @@ -0,0 +1,37 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname


@click.command("create", short_help="Create a new team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.option(
    "--description",
)
def team_create_cmd(orgname_teamname, description):
    orgname, teamname = orgname_teamname.split(":", 1)
    client = AccountClient()
    client.create_team(orgname, teamname, description)
    return click.secho(
        "The team %s has been successfully created." % teamname,
        fg="green",
    )

platformio/account/team/commands/destroy.py  (new file)  @@ -0,0 +1,40 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname


@click.command("destroy", short_help="Destroy a team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
def team_destroy_cmd(orgname_teamname):
    orgname, teamname = orgname_teamname.split(":", 1)
    click.confirm(
        click.style(
            "Are you sure you want to destroy the %s team?" % teamname, fg="yellow"
        ),
        abort=True,
    )
    client = AccountClient()
    client.destroy_team(orgname, teamname)
    return click.secho(
        "The team %s has been successfully destroyed." % teamname,
        fg="green",
    )

platformio/account/team/commands/list.py  (new file)  @@ -0,0 +1,61 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import json

import click
from tabulate import tabulate

from platformio.account.client import AccountClient


@click.command("list", short_help="List teams")
@click.argument("orgname", required=False)
@click.option("--json-output", is_flag=True)
def team_list_cmd(orgname, json_output):
    client = AccountClient()
    data = {}
    if not orgname:
        for item in client.list_orgs():
            teams = client.list_teams(item.get("orgname"))
            data[item.get("orgname")] = teams
    else:
        teams = client.list_teams(orgname)
        data[orgname] = teams
    if json_output:
        return click.echo(json.dumps(data[orgname] if orgname else data))
    if not any(data.values()):
        return click.secho("You do not have any teams.", fg="yellow")
    for org_name, teams in data.items():
        for team in teams:
            click.echo()
            click.secho("%s:%s" % (org_name, team.get("name")), fg="cyan")
            click.echo("-" * len("%s:%s" % (org_name, team.get("name"))))
            table_data = []
            if team.get("description"):
                table_data.append(("Description:", team.get("description")))
            table_data.append(
                (
                    "Members:",
                    (
                        ", ".join(
                            (member.get("username") for member in team.get("members"))
                        )
                        if team.get("members")
                        else "-"
                    ),
                )
            )
            click.echo(tabulate(table_data, tablefmt="plain"))
    return click.echo()

platformio/account/team/commands/remove.py  (new file)  @@ -0,0 +1,36 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname


@click.command("remove", short_help="Remove a member from team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.argument("username")
def team_remove_cmd(orgname_teamname, username):
    orgname, teamname = orgname_teamname.split(":", 1)
    client = AccountClient()
    client.remove_team_member(orgname, teamname, username)
    return click.secho(
        "The %s member has been successfully removed from the %s team."
        % (username, teamname),
        fg="green",
    )

platformio/account/team/commands/update.py  (new file)  @@ -0,0 +1,51 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import click

from platformio.account.client import AccountClient
from platformio.account.validate import validate_orgname_teamname, validate_teamname


@click.command("update", short_help="Update team")
@click.argument(
    "orgname_teamname",
    metavar="ORGNAME:TEAMNAME",
    callback=lambda _, __, value: validate_orgname_teamname(value),
)
@click.option(
    "--name",
    callback=lambda _, __, value: validate_teamname(value) if value else value,
    help="A new team name",
)
@click.option(
    "--description",
)
def team_update_cmd(orgname_teamname, **kwargs):
    orgname, teamname = orgname_teamname.split(":", 1)
    client = AccountClient()
    team = client.get_team(orgname, teamname)
    new_team = {
        key: value if value is not None else team[key] for key, value in kwargs.items()
    }
    if not any(kwargs.values()):
        for key in kwargs:
            new_team[key] = click.prompt(key.capitalize(), default=team[key])
            if key == "name":
                validate_teamname(new_team[key])
    client.update_team(orgname, teamname, new_team)
    return click.secho(
        "The team %s has been successfully updated." % teamname,
        fg="green",
    )

platformio/account/validate.py  (new file)  @@ -0,0 +1,84 @@

# Copyright (c) 2014-present PlatformIO <contact@platformio.org>  (Apache-2.0 license header, identical to the one above)

import re

import click


def validate_username(value, field="username"):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,37}$", value, flags=re.I
    ):
        raise click.BadParameter(
            "Invalid %s format. "
            "%s must contain only alphanumeric characters "
            "or single hyphens, cannot begin or end with a hyphen, "
            "and must not be longer than 38 characters."
            % (field.lower(), field.capitalize())
        )
    return value


def validate_orgname(value):
    return validate_username(value, "Organization name")


def validate_email(value):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d_\.\+\-]+@[a-z\d\-]+\.[a-z\d\-\.]+$", value, flags=re.I
    ):
        raise click.BadParameter("Invalid email address")
    return value


def validate_password(value):
    value = str(value).strip() if value else None
    if not value or not re.match(r"^(?=.*[a-z])(?=.*\d).{8,}$", value):
        raise click.BadParameter(
            "Invalid password format. "
            "Password must contain at least 8 characters"
            " including a number and a lowercase letter"
        )
    return value


def validate_teamname(value):
    value = str(value).strip() if value else None
    if not value or not re.match(
        r"^[a-z\d](?:[a-z\d]|[\-_ ](?=[a-z\d])){0,19}$", value, flags=re.I
    ):
        raise click.BadParameter(
            "Invalid team name format. "
            "Team name must only contain alphanumeric characters, "
            "single hyphens, underscores, spaces. It can not "
            "begin or end with a hyphen or a underscore and must"
            " not be longer than 20 characters."
        )
    return value


def validate_orgname_teamname(value):
    value = str(value).strip() if value else None
    if not value or ":" not in value:
        raise click.BadParameter(
            "Please specify organization and team name using the following"
            " format - orgname:teamname. For example, mycompany:DreamTeam"
        )
    orgname, teamname = value.split(":", 1)
    validate_orgname(orgname)
    validate_teamname(teamname)
    return value
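The validators above are plain functions that either return the cleaned value or raise click.BadParameter, so they can be checked outside of a CLI run. A minimal sketch, illustrative only and not part of this changeset:

# Illustrative only: exercising the validators defined in validate.py.
import click

from platformio.account.validate import validate_orgname_teamname, validate_password

print(validate_orgname_teamname("mycompany:DreamTeam"))  # returns the value unchanged

try:
    validate_password("short")  # fewer than 8 characters and no digit
except click.BadParameter as exc:
    print("rejected:", exc.message)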
@ -12,114 +12,118 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import codecs
|
||||
import getpass
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import time
|
||||
import uuid
|
||||
from copy import deepcopy
|
||||
from os import environ, getenv, listdir, remove
|
||||
from os.path import abspath, dirname, expanduser, isdir, isfile, join
|
||||
from time import time
|
||||
|
||||
import requests
|
||||
|
||||
from platformio import exception, lockfile, util
|
||||
from platformio import __version__, exception, fs, proc
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
from platformio.package.lockfile import LockFile
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import get_default_projects_dir
|
||||
|
||||
|
||||
def projects_dir_validate(projects_dir):
|
||||
assert isdir(projects_dir)
|
||||
return abspath(projects_dir)
|
||||
assert os.path.isdir(projects_dir)
|
||||
return os.path.abspath(projects_dir)
|
||||
|
||||
|
||||
DEFAULT_SETTINGS = {
|
||||
"auto_update_libraries": {
|
||||
"description": "Automatically update libraries (Yes/No)",
|
||||
"value": False
|
||||
},
|
||||
"auto_update_platforms": {
|
||||
"description": "Automatically update platforms (Yes/No)",
|
||||
"value": False
|
||||
},
|
||||
"check_libraries_interval": {
|
||||
"description": "Check for the library updates interval (days)",
|
||||
"value": 7
|
||||
},
|
||||
"check_platformio_interval": {
|
||||
"description": "Check for the new PlatformIO interval (days)",
|
||||
"value": 3
|
||||
"description": "Check for the new PlatformIO Core interval (days)",
|
||||
"value": 7,
|
||||
},
|
||||
"check_platforms_interval": {
|
||||
"description": "Check for the platform updates interval (days)",
|
||||
"value": 7
|
||||
"check_prune_system_threshold": {
|
||||
"description": "Check for pruning unnecessary data threshold (megabytes)",
|
||||
"value": 1024,
|
||||
},
|
||||
"enable_cache": {
|
||||
"description": "Enable caching for API requests and Library Manager",
|
||||
"value": True
|
||||
},
|
||||
"enable_ssl": {
|
||||
"description": "Enable SSL for PlatformIO Services",
|
||||
"value": False
|
||||
"description": "Enable caching for HTTP API requests",
|
||||
"value": True,
|
||||
},
|
||||
"enable_telemetry": {
|
||||
"description":
|
||||
("Telemetry service <https://docs.platformio.org/page/"
|
||||
"userguide/cmd_settings.html?#enable-telemetry> (Yes/No)"),
|
||||
"value":
|
||||
True
|
||||
"description": ("Telemetry service <https://bit.ly/pio-telemetry> (Yes/No)"),
|
||||
"value": True,
|
||||
},
|
||||
"force_verbose": {
|
||||
"description": "Force verbose output when processing environments",
|
||||
"value": False
|
||||
"value": False,
|
||||
},
|
||||
"projects_dir": {
|
||||
"description": "Default location for PlatformIO projects (PIO Home)",
|
||||
"value": join(expanduser("~"), "Documents", "PlatformIO", "Projects"),
|
||||
"validator": projects_dir_validate
|
||||
"description": "Default location for PlatformIO projects (PlatformIO Home)",
|
||||
"value": get_default_projects_dir(),
|
||||
"validator": projects_dir_validate,
|
||||
},
|
||||
"enable_proxy_strict_ssl": {
|
||||
"description": "Verify the proxy server certificate against the list of supplied CAs",
|
||||
"value": True,
|
||||
},
|
||||
}
|
||||
|
||||
SESSION_VARS = {"command_ctx": None, "force_option": False, "caller_id": None}
|
||||
SESSION_VARS = {
|
||||
"command_ctx": None,
|
||||
"caller_id": None,
|
||||
"custom_project_conf": None,
|
||||
"pause_telemetry": False,
|
||||
}
|
||||
|
||||
|
||||
class State(object):
|
||||
def resolve_state_path(conf_option_dir, file_name, ensure_dir_exists=True):
|
||||
state_dir = ProjectConfig.get_instance().get("platformio", conf_option_dir)
|
||||
if ensure_dir_exists and not os.path.isdir(state_dir):
|
||||
os.makedirs(state_dir)
|
||||
return os.path.join(state_dir, file_name)
|
||||
|
||||
|
||||
class State:
|
||||
def __init__(self, path=None, lock=False):
|
||||
self.path = path
|
||||
self.lock = lock
|
||||
if not self.path:
|
||||
self.path = join(util.get_home_dir(), "appstate.json")
|
||||
self._state = {}
|
||||
self._prev_state = {}
|
||||
self.path = resolve_state_path("core_dir", "appstate.json")
|
||||
self._storage = {}
|
||||
self._lockfile = None
|
||||
self.modified = False
|
||||
|
||||
def __enter__(self):
|
||||
try:
|
||||
self._lock_state_file()
|
||||
if isfile(self.path):
|
||||
self._state = util.load_json(self.path)
|
||||
except exception.PlatformioException:
|
||||
self._state = {}
|
||||
self._prev_state = deepcopy(self._state)
|
||||
return self._state
|
||||
if os.path.isfile(self.path):
|
||||
self._storage = fs.load_json(self.path)
|
||||
assert isinstance(self._storage, dict)
|
||||
except (
|
||||
AssertionError,
|
||||
ValueError,
|
||||
UnicodeDecodeError,
|
||||
exception.InvalidJSONFile,
|
||||
):
|
||||
self._storage = {}
|
||||
return self
|
||||
|
||||
def __exit__(self, type_, value, traceback):
|
||||
if self._prev_state != self._state:
|
||||
if self.modified:
|
||||
try:
|
||||
with codecs.open(self.path, "w", encoding="utf8") as fp:
|
||||
json.dump(self._state, fp)
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(util.get_home_dir())
|
||||
with open(self.path, mode="w", encoding="utf8") as fp:
|
||||
fp.write(json.dumps(self._storage))
|
||||
except IOError as exc:
|
||||
raise exception.HomeDirPermissionsError(
|
||||
os.path.dirname(self.path)
|
||||
) from exc
|
||||
self._unlock_state_file()
|
||||
|
||||
def _lock_state_file(self):
|
||||
if not self.lock:
|
||||
return
|
||||
self._lockfile = lockfile.LockFile(self.path)
|
||||
self._lockfile = LockFile(self.path)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except IOError:
|
||||
raise exception.HomeDirPermissionsError(dirname(self.path))
|
||||
except IOError as exc:
|
||||
raise exception.HomeDirPermissionsError(os.path.dirname(self.path)) from exc
|
||||
|
||||
def _unlock_state_file(self):
|
||||
if hasattr(self, "_lockfile") and self._lockfile:
|
||||
@ -128,137 +132,37 @@ class State(object):
|
||||
def __del__(self):
|
||||
self._unlock_state_file()
|
||||
|
||||
# Dictionary Proxy
|
||||
|
||||
class ContentCache(object):
|
||||
def as_dict(self):
|
||||
return self._storage
|
||||
|
||||
def __init__(self, cache_dir=None):
|
||||
self.cache_dir = None
|
||||
self._db_path = None
|
||||
self._lockfile = None
|
||||
def keys(self):
|
||||
return self._storage.keys()
|
||||
|
||||
self.cache_dir = cache_dir or util.get_cache_dir()
|
||||
self._db_path = join(self.cache_dir, "db.data")
|
||||
def get(self, key, default=True):
|
||||
return self._storage.get(key, default)
|
||||
|
||||
def __enter__(self):
|
||||
self.delete()
|
||||
return self
|
||||
def update(self, *args, **kwargs):
|
||||
self.modified = True
|
||||
return self._storage.update(*args, **kwargs)
|
||||
|
||||
def __exit__(self, type_, value, traceback):
|
||||
pass
|
||||
def clear(self):
|
||||
return self._storage.clear()
|
||||
|
||||
def _lock_dbindex(self):
|
||||
if not self.cache_dir:
|
||||
os.makedirs(self.cache_dir)
|
||||
self._lockfile = lockfile.LockFile(self.cache_dir)
|
||||
try:
|
||||
self._lockfile.acquire()
|
||||
except: # pylint: disable=bare-except
|
||||
return False
|
||||
def __getitem__(self, key):
|
||||
return self._storage[key]
|
||||
|
||||
return True
|
||||
def __setitem__(self, key, value):
|
||||
self.modified = True
|
||||
self._storage[key] = value
|
||||
|
||||
def _unlock_dbindex(self):
|
||||
if self._lockfile:
|
||||
self._lockfile.release()
|
||||
return True
|
||||
def __delitem__(self, key):
|
||||
self.modified = True
|
||||
del self._storage[key]
|
||||
|
||||
def get_cache_path(self, key):
|
||||
assert len(key) > 3
|
||||
return join(self.cache_dir, key[-2:], key)
|
||||
|
||||
@staticmethod
|
||||
def key_from_args(*args):
|
||||
h = hashlib.md5()
|
||||
for data in args:
|
||||
h.update(str(data))
|
||||
return h.hexdigest()
|
||||
|
||||
def get(self, key):
|
||||
cache_path = self.get_cache_path(key)
|
||||
if not isfile(cache_path):
|
||||
return None
|
||||
with codecs.open(cache_path, "rb", encoding="utf8") as fp:
|
||||
return fp.read()
|
||||
|
||||
def set(self, key, data, valid):
|
||||
if not get_setting("enable_cache"):
|
||||
return False
|
||||
cache_path = self.get_cache_path(key)
|
||||
if isfile(cache_path):
|
||||
self.delete(key)
|
||||
if not data:
|
||||
return False
|
||||
if not isdir(self.cache_dir):
|
||||
os.makedirs(self.cache_dir)
|
||||
tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
|
||||
assert valid.endswith(tuple(tdmap.keys()))
|
||||
expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
|
||||
|
||||
if not self._lock_dbindex():
|
||||
return False
|
||||
|
||||
if not isdir(dirname(cache_path)):
|
||||
os.makedirs(dirname(cache_path))
|
||||
try:
|
||||
with codecs.open(cache_path, "wb", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
with open(self._db_path, "a") as fp:
|
||||
fp.write("%s=%s\n" % (str(expire_time), cache_path))
|
||||
except UnicodeError:
|
||||
if isfile(cache_path):
|
||||
try:
|
||||
remove(cache_path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
return self._unlock_dbindex()
|
||||
|
||||
def delete(self, keys=None):
|
||||
""" Keys=None, delete expired items """
|
||||
if not isfile(self._db_path):
|
||||
return None
|
||||
if not keys:
|
||||
keys = []
|
||||
if not isinstance(keys, list):
|
||||
keys = [keys]
|
||||
paths_for_delete = [self.get_cache_path(k) for k in keys]
|
||||
found = False
|
||||
newlines = []
|
||||
with open(self._db_path) as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip()
|
||||
if "=" not in line:
|
||||
continue
|
||||
expire, path = line.split("=")
|
||||
if time() < int(expire) and isfile(path) and \
|
||||
path not in paths_for_delete:
|
||||
newlines.append(line)
|
||||
continue
|
||||
found = True
|
||||
if isfile(path):
|
||||
try:
|
||||
remove(path)
|
||||
if not listdir(dirname(path)):
|
||||
util.rmtree_(dirname(path))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if found and self._lock_dbindex():
|
||||
with open(self._db_path, "w") as fp:
|
||||
fp.write("\n".join(newlines) + "\n")
|
||||
self._unlock_dbindex()
|
||||
|
||||
return True
|
||||
|
||||
def clean(self):
|
||||
if not self.cache_dir or not isdir(self.cache_dir):
|
||||
return
|
||||
util.rmtree_(self.cache_dir)
|
||||
|
||||
|
||||
def clean_cache():
|
||||
with ContentCache() as cc:
|
||||
cc.clean()
|
||||
def __contains__(self, item):
|
||||
return item in self._storage
|
||||
|
||||
|
||||
def sanitize_setting(name, value):
|
||||
@ -268,56 +172,58 @@ def sanitize_setting(name, value):
|
||||
defdata = DEFAULT_SETTINGS[name]
|
||||
try:
|
||||
if "validator" in defdata:
|
||||
value = defdata['validator'](value)
|
||||
elif isinstance(defdata['value'], bool):
|
||||
value = defdata["validator"](value)
|
||||
elif isinstance(defdata["value"], bool):
|
||||
if not isinstance(value, bool):
|
||||
value = str(value).lower() in ("true", "yes", "y", "1")
|
||||
elif isinstance(defdata['value'], int):
|
||||
elif isinstance(defdata["value"], int):
|
||||
value = int(value)
|
||||
except Exception:
|
||||
raise exception.InvalidSettingValue(value, name)
|
||||
except Exception as exc:
|
||||
raise exception.InvalidSettingValue(value, name) from exc
|
||||
return value
|
||||
|
||||
|
||||
def get_state_item(name, default=None):
|
||||
with State() as data:
|
||||
return data.get(name, default)
|
||||
with State() as state:
|
||||
return state.get(name, default)
|
||||
|
||||
|
||||
def set_state_item(name, value):
|
||||
with State(lock=True) as data:
|
||||
data[name] = value
|
||||
with State(lock=True) as state:
|
||||
state[name] = value
|
||||
state.modified = True
|
||||
|
||||
|
||||
def delete_state_item(name):
|
||||
with State(lock=True) as data:
|
||||
if name in data:
|
||||
del data[name]
|
||||
with State(lock=True) as state:
|
||||
if name in state:
|
||||
del state[name]
|
||||
|
||||
|
||||
def get_setting(name):
|
||||
_env_name = "PLATFORMIO_SETTING_%s" % name.upper()
|
||||
if _env_name in environ:
|
||||
return sanitize_setting(name, getenv(_env_name))
|
||||
if _env_name in os.environ:
|
||||
return sanitize_setting(name, os.getenv(_env_name))
|
||||
|
||||
with State() as data:
|
||||
if "settings" in data and name in data['settings']:
|
||||
return data['settings'][name]
|
||||
with State() as state:
|
||||
if "settings" in state and name in state["settings"]:
|
||||
return state["settings"][name]
|
||||
|
||||
return DEFAULT_SETTINGS[name]['value']
|
||||
return DEFAULT_SETTINGS[name]["value"]
|
||||
|
||||
|
||||
def set_setting(name, value):
|
||||
with State(lock=True) as data:
|
||||
if "settings" not in data:
|
||||
data['settings'] = {}
|
||||
data['settings'][name] = sanitize_setting(name, value)
|
||||
with State(lock=True) as state:
|
||||
if "settings" not in state:
|
||||
state["settings"] = {}
|
||||
state["settings"][name] = sanitize_setting(name, value)
|
||||
state.modified = True
|
||||
|
||||
|
||||
def reset_settings():
|
||||
with State(lock=True) as data:
|
||||
if "settings" in data:
|
||||
del data['settings']
|
||||
with State(lock=True) as state:
|
||||
if "settings" in state:
|
||||
del state["settings"]
|
||||
|
||||
|
||||
def get_session_var(name, default=None):
|
||||
@ -330,30 +236,58 @@ def set_session_var(name, value):
|
||||
|
||||
|
||||
def is_disabled_progressbar():
|
||||
return any([
|
||||
get_session_var("force_option"),
|
||||
util.is_ci(),
|
||||
getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
|
||||
])
|
||||
return os.getenv("PLATFORMIO_DISABLE_PROGRESSBAR") == "true"
|
||||
|
||||
|
||||
def get_cid():
|
||||
cid = get_state_item("cid")
|
||||
if not cid:
|
||||
_uid = None
|
||||
if getenv("C9_UID"):
|
||||
_uid = getenv("C9_UID")
|
||||
elif getenv("CHE_API", getenv("CHE_API_ENDPOINT")):
|
||||
try:
|
||||
_uid = requests.get("{api}/user?token={token}".format(
|
||||
api=getenv("CHE_API", getenv("CHE_API_ENDPOINT")),
|
||||
token=getenv("USER_TOKEN"))).json().get("id")
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
cid = str(
|
||||
uuid.UUID(
|
||||
bytes=hashlib.md5(str(
|
||||
_uid if _uid else uuid.getnode())).digest()))
|
||||
if "windows" in util.get_systype() or os.getuid() > 0:
|
||||
set_state_item("cid", cid)
|
||||
if cid:
|
||||
return cid
|
||||
uid = None
|
||||
if os.getenv("GITHUB_USER"):
|
||||
uid = os.getenv("GITHUB_USER")
|
||||
elif os.getenv("GITPOD_GIT_USER_NAME"):
|
||||
uid = os.getenv("GITPOD_GIT_USER_NAME")
|
||||
if not uid:
|
||||
uid = uuid.getnode()
|
||||
cid = uuid.UUID(bytes=hashlib.md5(hashlib_encode_data(uid)).digest())
|
||||
cid = str(cid)
|
||||
if IS_WINDOWS or os.getuid() > 0: # pylint: disable=no-member
|
||||
set_state_item("cid", cid)
|
||||
set_state_item("created_at", int(time.time()))
|
||||
return cid
|
||||
|
||||
|
||||
def get_project_id(project_dir):
|
||||
return hashlib.sha1(hashlib_encode_data(project_dir)).hexdigest()
|
||||
|
||||
|
||||
def get_user_agent():
|
||||
data = [
|
||||
"PlatformIO/%s" % __version__,
|
||||
"CI/%d" % int(proc.is_ci()),
|
||||
"Container/%d" % int(proc.is_container()),
|
||||
]
|
||||
if get_session_var("caller_id"):
|
||||
data.append("Caller/%s" % get_session_var("caller_id"))
|
||||
if os.getenv("PLATFORMIO_IDE"):
|
||||
data.append("IDE/%s" % os.getenv("PLATFORMIO_IDE"))
|
||||
data.append("Python/%s" % platform.python_version())
|
||||
data.append("Platform/%s" % platform.platform())
|
||||
if not get_setting("enable_telemetry"):
|
||||
data.append("Telemetry/0")
|
||||
return " ".join(data)
|
||||
|
||||
|
||||
def get_host_id():
|
||||
h = hashlib.sha1(hashlib_encode_data(get_cid()))
|
||||
try:
|
||||
username = getpass.getuser()
|
||||
h.update(hashlib_encode_data(username))
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def get_host_name():
|
||||
return str(socket.gethostname())[:255]
|
||||
|
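The functions above (get_setting, set_setting, reset_settings, get_cid) live in platformio.app. The following usage sketch is not part of the change set; it assumes a working PlatformIO Core installation, and the setting value written below is purely illustrative:

from platformio import app

# An environment override wins first, then the persisted state file,
# then the documented default value for the setting.
print(app.get_setting("enable_telemetry"))

# Persist a custom value into the state file (illustrative path),
# then drop all customized settings again.
app.set_setting("projects_dir", "/tmp/pio-projects")
app.reset_settings()

# A stable, anonymized client ID derived from the user/host as in get_cid() above.
print(app.get_cid())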
487
platformio/assets/schema/library.json
Normal file
@@ -0,0 +1,487 @@
|
||||
{
|
||||
"$id": "https://example.com/library.json",
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "library.json schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"maxLength": 50,
|
||||
"description": "A name of a library.\nMust be unique in the PlatformIO Registry\nShould be slug style for simplicity, consistency, and compatibility. Example: HelloWorld\nCan contain a-z, digits, and dashes (but not start/end with them)\nConsecutive dashes and [:;/,@<>] chars are not allowed.",
|
||||
"required": true
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"maxLength": 20,
|
||||
"description": "A version of a current library source code. Can contain a-z, digits, dots or dash and should be Semantic Versioning compatible.",
|
||||
"required": true
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "The field helps users to identify and search for your library with a brief description. Describe the hardware devices (sensors, boards and etc.) which are suitable with it.",
|
||||
"required": true
|
||||
},
|
||||
"keywords": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"maxLength": 255
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Used for search by keyword. Helps to make your library easier to discover without people needing to know its name.\nThe keyword should be lowercased, can contain a-z, digits and dash (but not start/end with them). A list from the keywords can be specified with separator , or declared as Array.",
|
||||
"required": true
|
||||
},
|
||||
"homepage": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"description": "Home page of a library (if is different from repository url).",
|
||||
"required": false
|
||||
},
|
||||
"repository": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"git",
|
||||
"hg",
|
||||
"svn"
|
||||
],
|
||||
"description": "only “git”, “hg” or “svn” are supported"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
},
|
||||
"branch": {
|
||||
"type": "string",
|
||||
"description": "if is not specified, default branch will be used. This field will be ignored if tag/release exists with the value of version."
|
||||
}
|
||||
},
|
||||
"description": "The repository in which the source code can be found.",
|
||||
"required": false
|
||||
},
|
||||
"authors": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"required": true,
|
||||
"description": "Full name"
|
||||
},
|
||||
"email": {
|
||||
"type": "string"
|
||||
},
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "An author’s contact page"
|
||||
},
|
||||
"maintainer": {
|
||||
"type": "boolean",
|
||||
"description": "Specify “maintainer” status"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "An author contact information\nIf authors field is not defined, PlatformIO will try to fetch data from VCS provider (Github, Gitlab, etc) if repository is declared.",
|
||||
"required": false
|
||||
},
|
||||
"license": {
|
||||
"type": "string",
|
||||
"description": "A SPDX license ID or SPDX Expression. You can check the full list of SPDX license IDs (see “Identifier” column).",
|
||||
"required": false
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "espidf, freertos, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible frameworks. The available framework names are defined in the Frameworks section.\nIf the library is compatible with the all frameworks, then do not declare this field or you use *",
|
||||
"required": false
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "atmelavr, espressif8266, *, etc'"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list with compatible development platforms. The available platform name are defined in Development Platforms section.\nIf the library is compatible with the all platforms, then do not declare this field or use *.\nPlatformIO does not check platforms for compatibility in default mode. See Compatibility Mode for details. If you need a strict checking for compatible platforms for a library, please set libCompatMode to strict.",
|
||||
"required": false
|
||||
},
|
||||
"headers": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "MyLibrary.h"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"description": "FooCore.h, FooFeature.h"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of header files that can be included in a project source files using #include <...> directive.",
|
||||
"required": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"base": {
|
||||
"type": "string"
|
||||
},
|
||||
"files": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "A list of example patterns.",
|
||||
"required": "false"
|
||||
},
|
||||
"dependencies": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"owner": {
|
||||
"type": "string",
|
||||
"description": "an owner name (username) from the PlatformIO Registry"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "library name"
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Version Requirements or Package Specifications"
|
||||
},
|
||||
"frameworks": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "project compatible Frameworks"
|
||||
},
|
||||
"platforms": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": " project compatible Development Platforms"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "A list of dependent libraries that will be automatically installed.",
|
||||
"required": false
|
||||
},
|
||||
"export": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Export only files that matched declared patterns.\n* - matches everything\n? - matches any single character\n[seq] - matches any character in seq\n[!seq] - matches any character not in seq"
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "Exclude the directories and files which match with exclude patterns."
|
||||
}
|
||||
},
|
||||
"description": "This option is useful if you need to exclude extra data (test code, docs, images, PDFs, etc). It allows one to reduce the size of the final archive.\nTo check which files will be included in the final packages, please use pio pkg pack command.",
|
||||
"required": false
|
||||
},
|
||||
"scripts": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"postinstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script AFTER the package has been installed.\nRun a custom Python script located in the package “scripts” folder AFTER the package is installed. Please note that you don’t need to specify a Python interpreter for Python scripts"
|
||||
},
|
||||
"preuninstall": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "runs a script BEFORE the package is removed.\nRun a custom Bash script BEFORE the package is uninstalled. The script is declared as a list of command arguments and is located at the root of a package"
|
||||
}
|
||||
},
|
||||
"description": "Execute custom scripts during the special Package Management CLI life cycle events",
|
||||
"required": false
|
||||
},
|
||||
"build": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"flags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Extra flags to control preprocessing, compilation, assembly, and linking processes. More details build_flags.\nKeep in mind when operating with the -I flag (directories to be searched for header files). The path should be relative to the root directory where the library.json manifest is located."
|
||||
},
|
||||
"unflags": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Remove base/initial flags which were set by development platform. More details build_unflags."
|
||||
},
|
||||
"includeDir": {
|
||||
"type": "string",
|
||||
"description": "Custom directory to be searched for header files. A default value is include and means that folder is located at the root of a library.\nThe Library Dependency Finder (LDF) will pick a library automatically only when a project or other dependent libraries include any header file located in includeDir or srcDir.",
|
||||
"required": false
|
||||
},
|
||||
"srcDir": {
|
||||
"type": "string",
|
||||
"description": "Custom location of library source code. A default value is src and means that folder is located in the root of a library.",
|
||||
"required": "false"
|
||||
},
|
||||
"srcFilter": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"description": "Specify which source files should be included/excluded from build process. The path in filter should be relative to the srcDir option of a library.\nSee syntax for build_src_filter.\nPlease note that you can generate source filter “on-the-fly” using extraScript",
|
||||
"required": false
|
||||
},
|
||||
"extraScript": {
|
||||
"type": "string",
|
||||
"description": "Launch extra script before a build process.",
|
||||
"required": "false"
|
||||
},
|
||||
"libArchive": {
|
||||
"type": "boolean",
|
||||
"description": "Create an archive (*.a, static library) from the object files and link it into a firmware (program). This is default behavior of PlatformIO Build System (\"libArchive\": true).\nSetting \"libArchive\": false will instruct PlatformIO Build System to link object files directly (in-line). This could be useful if you need to override weak symbols defined in framework or other libraries.\nYou can disable library archiving globally using lib_archive option in “platformio.ini” (Project Configuration File).",
|
||||
"required": "false"
|
||||
},
|
||||
"libLDFMode": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"off"
|
||||
],
|
||||
"description": "“Manual mode”, does not process source files of a project and dependencies. Builds only the libraries that are specified in manifests (library.json, module.json) or using lib_deps option."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain"
|
||||
],
|
||||
"description": "[DEFAULT] Parses ALL C/C++ source files of the project and follows only by nested includes (#include ..., chain...) from the libraries. It also parses C, CC, CPP files from libraries which have the same name as included header file. Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep"
|
||||
],
|
||||
"description": "Parses ALL C/C++ source files of the project and parses ALL C/C++ source files of the each found dependency (recursively). Does not evaluate C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"chain+"
|
||||
],
|
||||
"description": "The same behavior as for the chain but evaluates C/C++ Preprocessor conditional syntax."
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"deep+"
|
||||
],
|
||||
"description": "The same behavior as for the deep but evaluates C/C++ Preprocessor conditional syntax."
|
||||
}
|
||||
],
|
||||
"description": "Specify Library Dependency Finder Mode. See Dependency Finder Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"libCompatMode": {
|
||||
"type": "string",
|
||||
"description": "Specify Library Compatibility Mode. See Compatibility Mode for details.",
|
||||
"required": false
|
||||
},
|
||||
"builder": {
|
||||
"anyOf": [
|
||||
{
|
||||
"enum": [
|
||||
"PlatformIOLibBuilder"
|
||||
],
|
||||
"description": "Default Builder"
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"ArduinoLibBuilder"
|
||||
]
|
||||
},
|
||||
{
|
||||
"enum": [
|
||||
"MbedLibBuilder"
|
||||
]
|
||||
}
|
||||
],
|
||||
"description": "Override default PlatformIOLibBuilder with another builder.",
|
||||
"required": false
|
||||
}
|
||||
},
|
||||
"required": false
|
||||
}
|
||||
}
|
||||
}
|
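Read together, the schema above marks name, version, description, and keywords as mandatory. A minimal manifest that satisfies those constraints could be produced as in the sketch below; the field values are assumptions for illustration only, and the snippet is written in Python to stay consistent with the rest of the codebase:

import json

manifest = {
    "name": "HelloWorld",             # slug-style, unique in the PlatformIO Registry
    "version": "1.0.0",               # Semantic Versioning compatible
    "description": "Example library used to illustrate the manifest schema",
    "keywords": ["example", "demo"],  # or a single comma-separated string
    "frameworks": "*",                # optional: compatible with all frameworks
    "platforms": "*",                 # optional: compatible with all platforms
}

with open("library.json", "w", encoding="utf8") as fp:
    json.dump(manifest, fp, indent=2)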
183
platformio/assets/system/99-platformio-udev.rules
Normal file
@@ -0,0 +1,183 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
#####################################################################################
|
||||
#
|
||||
# INSTALLATION
|
||||
#
|
||||
# Please visit > https://docs.platformio.org/en/latest/core/installation/udev-rules.html
|
||||
#
|
||||
#####################################################################################
|
||||
|
||||
#
|
||||
# Boards
|
||||
#
|
||||
|
||||
# CP210X USB UART
|
||||
ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea[67][013]", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="80a9", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# FT231XS USB UART
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="6015", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Prolific Technology, Inc. PL2303 Serial Port
|
||||
ATTRS{idVendor}=="067b", ATTRS{idProduct}=="2303", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# QinHeng Electronics HL-340 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7523", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH343 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d3", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
# QinHeng Electronics CH9102 USB-Serial adapter
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="55d4", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Arduino boards
|
||||
ATTRS{idVendor}=="2341", ATTRS{idProduct}=="[08][023]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="2a03", ATTRS{idProduct}=="[08][02]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Arduino SAM-BA
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="6124", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{MTP_NO_PROBE}="1"
|
||||
|
||||
# Digistump boards
|
||||
ATTRS{idVendor}=="16d0", ATTRS{idProduct}=="0753", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Maple with DFU
|
||||
ATTRS{idVendor}=="1eaf", ATTRS{idProduct}=="000[34]", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# USBtiny
|
||||
ATTRS{idProduct}=="0c9f", ATTRS{idVendor}=="1781", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# USBasp V2.0
|
||||
ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="05dc", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Teensy boards
|
||||
ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789A]?", ENV{MTP_NO_PROBE}="1"
|
||||
SUBSYSTEMS=="usb", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789ABCD]?", MODE:="0666"
|
||||
KERNEL=="ttyACM*", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", MODE:="0666"
|
||||
|
||||
# TI Stellaris Launchpad
|
||||
ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI MSP430 Launchpad
|
||||
ATTRS{idVendor}=="0451", ATTRS{idProduct}=="f432", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# GD32V DFU Bootloader
|
||||
ATTRS{idVendor}=="28e9", ATTRS{idProduct}=="0189", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# FireBeetle-ESP32
|
||||
ATTRS{idVendor}=="1a86", ATTRS{idProduct}=="7522", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Wio Terminal
|
||||
ATTRS{idVendor}=="2886", ATTRS{idProduct}=="[08]02d", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Raspberry Pi Pico
|
||||
ATTRS{idVendor}=="2e8a", ATTRS{idProduct}=="[01]*", MODE:="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# AIR32F103
|
||||
ATTRS{idVendor}=="0d28", ATTRS{idProduct}=="0204", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STM32 virtual COM port
|
||||
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
#
|
||||
# Debuggers
|
||||
#
|
||||
|
||||
# Black Magic Probe
|
||||
SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic GDB Server", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
SUBSYSTEM=="tty", ATTRS{interface}=="Black Magic UART Port", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# opendous and estick
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="204f", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Original FT232/FT245/FT2232/FT232H/FT4232
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="60[01][104]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# DISTORTEC JTAG-lock-pick Tiny 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8220", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TUMPA, TUMPA Lite
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="8a9[89]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# XDS100v2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="a6d0", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Xverve Signalyzer Tool (DT-USB-ST), Signalyzer LITE (DT-USB-SLITE)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bca[01]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI/Luminary Stellaris Evaluation Board FTDI (several)
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bcd[9a]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# egnite Turtelizer 2
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="bdc8", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Section5 ICEbear
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="c14[01]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Amontec JTAGkey and JTAGkey-tiny
|
||||
ATTRS{idVendor}=="0403", ATTRS{idProduct}=="cff8", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI ICDI
|
||||
ATTRS{idVendor}=="0451", ATTRS{idProduct}=="c32a", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# STLink probes
|
||||
ATTRS{idVendor}=="0483", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Hilscher NXHX Boards
|
||||
ATTRS{idVendor}=="0640", ATTRS{idProduct}=="0028", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Hitex probes
|
||||
ATTRS{idVendor}=="0640", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Altera USB Blaster
|
||||
ATTRS{idVendor}=="09fb", ATTRS{idProduct}=="6001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Amontec JTAGkey-HiSpeed
|
||||
ATTRS{idVendor}=="0fbb", ATTRS{idProduct}=="1000", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# SEGGER J-Link
|
||||
ATTRS{idVendor}=="1366", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Raisonance RLink
|
||||
ATTRS{idVendor}=="138e", ATTRS{idProduct}=="9000", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Debug Board for Neo1973
|
||||
ATTRS{idVendor}=="1457", ATTRS{idProduct}=="5118", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Olimex probes
|
||||
ATTRS{idVendor}=="15ba", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# USBprog with OpenOCD firmware
|
||||
ATTRS{idVendor}=="1781", ATTRS{idProduct}=="0c63", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# TI/Luminary Stellaris In-Circuit Debug Interface (ICDI) Board
|
||||
ATTRS{idVendor}=="1cbe", ATTRS{idProduct}=="00fd", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Marvell Sheevaplug
|
||||
ATTRS{idVendor}=="9e88", ATTRS{idProduct}=="9e8f", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Keil Software, Inc. ULink
|
||||
ATTRS{idVendor}=="c251", ATTRS{idProduct}=="2710", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# CMSIS-DAP compatible adapters
|
||||
ATTRS{product}=="*CMSIS-DAP*", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Atmel AVR Dragon
|
||||
ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2107", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Espressif USB JTAG/serial debug unit
|
||||
ATTRS{idVendor}=="303a", ATTRS{idProduct}=="1001", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
||||
|
||||
# Zephyr framework USB CDC-ACM
|
||||
ATTRS{idVendor}=="2fe3", ATTRS{idProduct}=="0100", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1", ENV{ID_MM_PORT_IGNORE}="1"
|
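The header of the rules file points to the official installation guide. As a quick sanity check that is not part of the change set, the snippet below verifies on a Linux host that the file has been copied to the conventional udev rules directory (the path is an assumption based on that convention):

import os

RULES_PATH = "/etc/udev/rules.d/99-platformio-udev.rules"

if os.path.isfile(RULES_PATH):
    print("PlatformIO udev rules are installed")
else:
    print("Rules are missing; follow the installation guide referenced above")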
@@ -12,204 +12,239 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from os import environ
|
||||
from os.path import expanduser, join
|
||||
from time import time
|
||||
|
||||
from SCons.Script import (ARGUMENTS, COMMAND_LINE_TARGETS, DEFAULT_TARGETS,
|
||||
AllowSubstExceptions, AlwaysBuild, Default,
|
||||
DefaultEnvironment, Variables)
|
||||
import click
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import DEFAULT_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import AllowSubstExceptions # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
from SCons.Script import Default # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import Import # pylint: disable=import-error
|
||||
from SCons.Script import Variables # pylint: disable=import-error
|
||||
|
||||
from platformio import util
|
||||
from platformio import app, fs
|
||||
from platformio.platform.base import PlatformBase
|
||||
from platformio.proc import get_pythonexe_path
|
||||
from platformio.project.helpers import get_project_dir
|
||||
|
||||
AllowSubstExceptions(NameError)
|
||||
|
||||
# allow common variables from INI file
|
||||
commonvars = Variables(None)
|
||||
commonvars.AddVariables(
|
||||
("PLATFORM_MANIFEST",),
|
||||
# append CLI arguments to build environment
|
||||
clivars = Variables(None)
|
||||
clivars.AddVariables(
|
||||
("BUILD_SCRIPT",),
|
||||
("EXTRA_SCRIPTS",),
|
||||
("PROJECT_CONFIG",),
|
||||
("PIOENV",),
|
||||
("PIOTEST",),
|
||||
("PIOPLATFORM",),
|
||||
("PIOFRAMEWORK",),
|
||||
|
||||
# build options
|
||||
("BUILD_FLAGS",),
|
||||
("SRC_BUILD_FLAGS",),
|
||||
("BUILD_UNFLAGS",),
|
||||
("SRC_FILTER",),
|
||||
|
||||
# library options
|
||||
("LIB_LDF_MODE",),
|
||||
("LIB_COMPAT_MODE",),
|
||||
("LIB_DEPS",),
|
||||
("LIB_IGNORE",),
|
||||
("LIB_EXTRA_DIRS",),
|
||||
("LIB_ARCHIVE",),
|
||||
|
||||
# board options
|
||||
("BOARD",),
|
||||
# deprecated options, use board_{object.path} instead
|
||||
("BOARD_MCU",),
|
||||
("BOARD_F_CPU",),
|
||||
("BOARD_F_FLASH",),
|
||||
("BOARD_FLASH_MODE",),
|
||||
# end of deprecated options
|
||||
|
||||
# upload options
|
||||
("PIOTEST_RUNNING_NAME",),
|
||||
("UPLOAD_PORT",),
|
||||
("UPLOAD_PROTOCOL",),
|
||||
("UPLOAD_SPEED",),
|
||||
("UPLOAD_FLAGS",),
|
||||
("UPLOAD_RESETMETHOD",),
|
||||
|
||||
# test options
|
||||
("TEST_BUILD_PROJECT_SRC",),
|
||||
|
||||
# debug options
|
||||
("DEBUG_TOOL",),
|
||||
("DEBUG_SVD_PATH",),
|
||||
|
||||
) # yapf: disable
|
||||
|
||||
MULTILINE_VARS = [
|
||||
"EXTRA_SCRIPTS", "PIOFRAMEWORK", "BUILD_FLAGS", "SRC_BUILD_FLAGS",
|
||||
"BUILD_UNFLAGS", "UPLOAD_FLAGS", "SRC_FILTER", "LIB_DEPS", "LIB_IGNORE",
|
||||
"LIB_EXTRA_DIRS"
|
||||
]
|
||||
("PROGRAM_ARGS",),
|
||||
)
|
||||
|
||||
DEFAULT_ENV_OPTIONS = dict(
|
||||
tools=[
|
||||
"ar", "gas", "gcc", "g++", "gnulink", "platformio", "pioplatform",
|
||||
"piowinhooks", "piolib", "pioupload", "piomisc", "pioide"
|
||||
], # yapf: disable
|
||||
toolpath=[join(util.get_source_dir(), "builder", "tools")],
|
||||
variables=commonvars,
|
||||
|
||||
# Propagating External Environment
|
||||
PIOVARIABLES=commonvars.keys(),
|
||||
ENV=environ,
|
||||
UNIX_TIME=int(time()),
|
||||
PIOHOME_DIR=util.get_home_dir(),
|
||||
PROJECT_DIR=util.get_project_dir(),
|
||||
PROJECTINCLUDE_DIR=util.get_projectinclude_dir(),
|
||||
PROJECTSRC_DIR=util.get_projectsrc_dir(),
|
||||
PROJECTTEST_DIR=util.get_projecttest_dir(),
|
||||
PROJECTDATA_DIR=util.get_projectdata_dir(),
|
||||
PROJECTBUILD_DIR=util.get_projectbuild_dir(),
|
||||
BUILD_DIR=join("$PROJECTBUILD_DIR", "$PIOENV"),
|
||||
BUILDSRC_DIR=join("$BUILD_DIR", "src"),
|
||||
BUILDTEST_DIR=join("$BUILD_DIR", "test"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
LIBSOURCE_DIRS=[
|
||||
util.get_projectlib_dir(),
|
||||
util.get_projectlibdeps_dir(),
|
||||
join("$PIOHOME_DIR", "lib")
|
||||
"ar",
|
||||
"cc",
|
||||
"c++",
|
||||
"link",
|
||||
"piohooks",
|
||||
"pioasm",
|
||||
"piobuild",
|
||||
"pioproject",
|
||||
"pioplatform",
|
||||
"piotest",
|
||||
"piotarget",
|
||||
"piolib",
|
||||
"pioupload",
|
||||
"piosize",
|
||||
"pioino",
|
||||
"piomisc",
|
||||
"piointegration",
|
||||
"piomaxlen",
|
||||
],
|
||||
toolpath=[os.path.join(fs.get_source_dir(), "builder", "tools")],
|
||||
variables=clivars,
|
||||
# Propagating External Environment
|
||||
ENV=os.environ,
|
||||
UNIX_TIME=int(time()),
|
||||
BUILD_DIR=os.path.join("$PROJECT_BUILD_DIR", "$PIOENV"),
|
||||
BUILD_SRC_DIR=os.path.join("$BUILD_DIR", "src"),
|
||||
BUILD_TEST_DIR=os.path.join("$BUILD_DIR", "test"),
|
||||
COMPILATIONDB_PATH=os.path.join("$PROJECT_DIR", "compile_commands.json"),
|
||||
LIBPATH=["$BUILD_DIR"],
|
||||
PROGNAME="program",
|
||||
PROG_PATH=join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PYTHONEXE=util.get_pythonexe_path())
|
||||
PROGPATH=os.path.join("$BUILD_DIR", "$PROGNAME$PROGSUFFIX"),
|
||||
PROG_PATH="$PROGPATH", # deprecated
|
||||
PYTHONEXE=get_pythonexe_path(),
|
||||
)
|
||||
|
||||
# Declare command verbose messages
|
||||
command_strings = dict(
|
||||
ARCOM="Archiving",
|
||||
LINKCOM="Linking",
|
||||
RANLIBCOM="Indexing",
|
||||
ASCOM="Compiling",
|
||||
ASPPCOM="Compiling",
|
||||
CCCOM="Compiling",
|
||||
CXXCOM="Compiling",
|
||||
)
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
DEFAULT_ENV_OPTIONS['ARCOMSTR'] = "Archiving $TARGET"
|
||||
DEFAULT_ENV_OPTIONS['LINKCOMSTR'] = "Linking $TARGET"
|
||||
DEFAULT_ENV_OPTIONS['RANLIBCOMSTR'] = "Indexing $TARGET"
|
||||
for k in ("ASCOMSTR", "ASPPCOMSTR", "CCCOMSTR", "CXXCOMSTR"):
|
||||
DEFAULT_ENV_OPTIONS[k] = "Compiling $TARGET"
|
||||
for name, value in command_strings.items():
|
||||
DEFAULT_ENV_OPTIONS["%sSTR" % name] = "%s $TARGET" % (value)
|
||||
|
||||
env = DefaultEnvironment(**DEFAULT_ENV_OPTIONS)
|
||||
env.SConscriptChdir(False)
|
||||
|
||||
# decode common variables
|
||||
for k in commonvars.keys():
|
||||
if k in env:
|
||||
env[k] = base64.b64decode(env[k])
|
||||
if k in MULTILINE_VARS:
|
||||
env[k] = util.parse_conf_multi_values(env[k])
|
||||
# Load variables from CLI
|
||||
env.Replace(
|
||||
**{
|
||||
key: PlatformBase.decode_scons_arg(env[key])
|
||||
for key in list(clivars.keys())
|
||||
if key in env
|
||||
}
|
||||
)
|
||||
|
||||
if env.GetOption('clean'):
|
||||
env.PioClean(env.subst("$BUILD_DIR"))
|
||||
# Setup project optional directories
|
||||
config = env.GetProjectConfig()
|
||||
app.set_session_var("custom_project_conf", config.path)
|
||||
|
||||
env.Replace(
|
||||
PROJECT_DIR=get_project_dir(),
|
||||
PROJECT_CORE_DIR=config.get("platformio", "core_dir"),
|
||||
PROJECT_PACKAGES_DIR=config.get("platformio", "packages_dir"),
|
||||
PROJECT_WORKSPACE_DIR=config.get("platformio", "workspace_dir"),
|
||||
PROJECT_LIBDEPS_DIR=config.get("platformio", "libdeps_dir"),
|
||||
PROJECT_INCLUDE_DIR=config.get("platformio", "include_dir"),
|
||||
PROJECT_SRC_DIR=config.get("platformio", "src_dir"),
|
||||
PROJECTSRC_DIR="$PROJECT_SRC_DIR", # legacy for dev/platform
|
||||
PROJECT_TEST_DIR=config.get("platformio", "test_dir"),
|
||||
PROJECT_DATA_DIR=config.get("platformio", "data_dir"),
|
||||
PROJECTDATA_DIR="$PROJECT_DATA_DIR", # legacy for dev/platform
|
||||
PROJECT_BUILD_DIR=config.get("platformio", "build_dir"),
|
||||
BUILD_TYPE=env.GetBuildType(),
|
||||
BUILD_CACHE_DIR=config.get("platformio", "build_cache_dir"),
|
||||
LIBSOURCE_DIRS=[
|
||||
config.get("platformio", "lib_dir"),
|
||||
os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"),
|
||||
config.get("platformio", "globallib_dir"),
|
||||
],
|
||||
)
|
||||
|
||||
if int(ARGUMENTS.get("ISATTY", 0)):
|
||||
# pylint: disable=protected-access
|
||||
click._compat.isatty = lambda stream: True
|
||||
|
||||
if env.subst("$BUILD_CACHE_DIR"):
|
||||
if not os.path.isdir(env.subst("$BUILD_CACHE_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_CACHE_DIR"))
|
||||
env.CacheDir("$BUILD_CACHE_DIR")
|
||||
|
||||
if not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
click.echo("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
if not os.path.isdir(env.subst("$BUILD_DIR")):
|
||||
os.makedirs(env.subst("$BUILD_DIR"))
|
||||
|
||||
# Dynamically load dependent tools
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Tool("compilation_db")
|
||||
|
||||
env.LoadProjectOptions()
|
||||
env.LoadPioPlatform()
|
||||
|
||||
env.SConsignFile(
|
||||
os.path.join(
|
||||
"$BUILD_CACHE_DIR" if env.subst("$BUILD_CACHE_DIR") else "$BUILD_DIR",
|
||||
".sconsign%d%d" % (sys.version_info[0], sys.version_info[1]),
|
||||
)
|
||||
)
|
||||
|
||||
env.SConscript(env.GetExtraScripts("pre"), exports="env")
|
||||
|
||||
if env.IsCleanTarget():
|
||||
env.CleanProject(fullclean=int(ARGUMENTS.get("FULLCLEAN", 0)))
|
||||
env.Exit(0)
|
||||
elif not int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print("Verbose mode can be enabled via `-v, --verbose` option")
|
||||
|
||||
# Handle custom variables from system environment
|
||||
for var in ("BUILD_FLAGS", "SRC_BUILD_FLAGS", "SRC_FILTER", "EXTRA_SCRIPTS",
|
||||
"UPLOAD_PORT", "UPLOAD_FLAGS", "LIB_EXTRA_DIRS"):
|
||||
k = "PLATFORMIO_%s" % var
|
||||
if k not in environ:
|
||||
continue
|
||||
if var in ("UPLOAD_PORT", ):
|
||||
env[var] = environ.get(k)
|
||||
continue
|
||||
env.Append(**{var: util.parse_conf_multi_values(environ.get(k))})
|
||||
|
||||
# Configure extra library source directories for LDF
|
||||
if util.get_project_optional_dir("lib_extra_dirs"):
|
||||
env.Prepend(
|
||||
LIBSOURCE_DIRS=util.parse_conf_multi_values(
|
||||
util.get_project_optional_dir("lib_extra_dirs")))
|
||||
env.Prepend(LIBSOURCE_DIRS=env.get("LIB_EXTRA_DIRS", []))
|
||||
env['LIBSOURCE_DIRS'] = [
|
||||
expanduser(d) if d.startswith("~") else d for d in env['LIBSOURCE_DIRS']
|
||||
]
|
||||
|
||||
env.LoadPioPlatform(commonvars)
|
||||
|
||||
env.SConscriptChdir(0)
|
||||
env.SConsignFile(join("$PROJECTBUILD_DIR", ".sconsign.dblite"))
|
||||
|
||||
for item in env.GetExtraScripts("pre"):
|
||||
env.SConscript(item, exports="env")
|
||||
|
||||
env.SConscript("$BUILD_SCRIPT")
|
||||
|
||||
if "UPLOAD_FLAGS" in env:
|
||||
env.Prepend(UPLOADERFLAGS=["$UPLOAD_FLAGS"])
|
||||
if env.GetProjectOption("upload_command"):
|
||||
env.Replace(UPLOADCMD=env.GetProjectOption("upload_command"))
|
||||
|
||||
for item in env.GetExtraScripts("post"):
|
||||
env.SConscript(item, exports="env")
|
||||
env.SConscript(env.GetExtraScripts("post"), exports="env")
|
||||
|
||||
##############################################################################
|
||||
|
||||
# Checking program size
|
||||
if env.get("SIZETOOL") and "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.Depends(["upload", "program"], "checkprogsize")
|
||||
if env.get("SIZETOOL") and not (
|
||||
set(["nobuild", "sizedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
):
|
||||
env.Depends("upload", "checkprogsize")
|
||||
# Replace platform's "size" target with our
|
||||
_new_targets = [t for t in DEFAULT_TARGETS if str(t) != "size"]
|
||||
Default(None)
|
||||
Default(_new_targets)
|
||||
Default("checkprogsize")
|
||||
|
||||
if "compiledb" in COMMAND_LINE_TARGETS:
|
||||
env.Alias("compiledb", env.CompilationDatabase("$COMPILATIONDB_PATH"))
|
||||
|
||||
# Print configured protocols
|
||||
env.AddPreAction(
|
||||
["upload", "program"],
|
||||
env.VerboseAction(lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol..."))
|
||||
"upload",
|
||||
env.VerboseAction(
|
||||
lambda source, target, env: env.PrintUploadInfo(),
|
||||
"Configuring upload protocol...",
|
||||
),
|
||||
)
|
||||
|
||||
AlwaysBuild(env.Alias("debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__debug", DEFAULT_TARGETS))
|
||||
AlwaysBuild(env.Alias("__test", DEFAULT_TARGETS))
|
||||
|
||||
env.ProcessDelayedActions()
|
||||
|
||||
##############################################################################
|
||||
|
||||
if "envdump" in COMMAND_LINE_TARGETS:
|
||||
print(env.Dump())
|
||||
click.echo(env.Dump())
|
||||
env.Exit(0)
|
||||
|
||||
if "idedata" in COMMAND_LINE_TARGETS:
|
||||
if env.IsIntegrationDump():
|
||||
projenv = None
|
||||
try:
|
||||
print("\n%s\n" % util.path_to_unicode(
|
||||
json.dumps(env.DumpIDEData(), ensure_ascii=False)))
|
||||
env.Exit(0)
|
||||
except UnicodeDecodeError:
|
||||
sys.stderr.write(
|
||||
"\nUnicodeDecodeError: Non-ASCII characters found in build "
|
||||
"environment\n"
|
||||
"See explanation in FAQ > Troubleshooting > Building\n"
|
||||
"https://docs.platformio.org/page/faq.html\n\n")
|
||||
env.Exit(1)
|
||||
Import("projenv")
|
||||
except: # pylint: disable=bare-except
|
||||
projenv = env
|
||||
data = projenv.DumpIntegrationData(env)
|
||||
# dump to file for the further reading by project.helpers.load_build_metadata
|
||||
with open(
|
||||
projenv.subst(os.path.join("$BUILD_DIR", "idedata.json")),
|
||||
mode="w",
|
||||
encoding="utf8",
|
||||
) as fp:
|
||||
json.dump(data, fp)
|
||||
click.echo("\n%s\n" % json.dumps(data)) # pylint: disable=undefined-variable
|
||||
env.Exit(0)
|
||||
|
||||
if "sizedata" in COMMAND_LINE_TARGETS:
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"sizedata",
|
||||
DEFAULT_TARGETS,
|
||||
env.VerboseAction(env.DumpSizeData, "Generating memory usage report..."),
|
||||
)
|
||||
)
|
||||
|
||||
Default("sizedata")
|
||||
|
||||
# issue #4604: process targets sequentially
|
||||
for index, target in enumerate(
|
||||
[t for t in COMMAND_LINE_TARGETS if not t.startswith("__")][1:]
|
||||
):
|
||||
env.Depends(target, COMMAND_LINE_TARGETS[index])
|
||||
|
29
platformio/builder/tools/pioasm.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import SCons.Tool.asm # pylint: disable=import-error
|
||||
|
||||
#
|
||||
# Resolve https://github.com/platformio/platformio-core/issues/3917
|
||||
# Avoid forcing .S to bare assembly on Windows OS
|
||||
#
|
||||
|
||||
if ".S" in SCons.Tool.asm.ASSuffixes:
|
||||
SCons.Tool.asm.ASSuffixes.remove(".S")
|
||||
if ".S" not in SCons.Tool.asm.ASPPSuffixes:
|
||||
SCons.Tool.asm.ASPPSuffixes.append(".S")
|
||||
|
||||
|
||||
generate = SCons.Tool.asm.generate
|
||||
exists = SCons.Tool.asm.exists
|
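The patch above moves the ".S" suffix from the bare assembler list to the preprocessor-aware one, so that case-insensitive filesystems on Windows do not treat .S sources as plain assembly. A small check of the effect, assuming both SCons and PlatformIO Core are importable in the current Python environment:

import SCons.Tool.asm  # provided by the SCons package

import platformio.builder.tools.pioasm  # noqa: F401  # applies the suffix patch on import

# After the import, ".S" files are routed through the preprocessed-assembly builder.
print(".S" in SCons.Tool.asm.ASSuffixes)    # expected: False
print(".S" in SCons.Tool.asm.ASPPSuffixes)  # expected: True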
402
platformio/builder/tools/piobuild.py
Normal file
@@ -0,0 +1,402 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import fnmatch
|
||||
import os
|
||||
import sys
|
||||
|
||||
from SCons import Builder, Util # pylint: disable=import-error
|
||||
from SCons.Node import FS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import AlwaysBuild # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
from SCons.Script import SConscript # pylint: disable=import-error
|
||||
|
||||
from platformio import __version__, fs
|
||||
from platformio.compat import IS_MACOS, string_types
|
||||
from platformio.package.version import pepver_to_semver
|
||||
from platformio.proc import where_is_program
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_ASM_EXT = ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
SRC_C_EXT = ["c"]
|
||||
SRC_CXX_EXT = ["cc", "cpp", "cxx", "c++"]
|
||||
SRC_BUILD_EXT = SRC_C_EXT + SRC_CXX_EXT + SRC_ASM_EXT
|
||||
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % os.sep, "-<.svn%s>" % os.sep]
|
||||
|
||||
|
||||
def scons_patched_match_splitext(path, suffixes=None):
|
||||
"""Patch SCons Builder, append $OBJSUFFIX to the end of each target"""
|
||||
tokens = Util.splitext(path)
|
||||
if suffixes and tokens[1] and tokens[1] in suffixes:
|
||||
return (path, tokens[1])
|
||||
return tokens
|
||||
|
||||
|
||||
def GetBuildType(env):
|
||||
modes = []
|
||||
if (
|
||||
set(["__debug", "sizedata"]) # sizedata = for memory inspection
|
||||
& set(COMMAND_LINE_TARGETS)
|
||||
or env.GetProjectOption("build_type") == "debug"
|
||||
):
|
||||
modes.append("debug")
|
||||
if "__test" in COMMAND_LINE_TARGETS or env.GetProjectOption("build_type") == "test":
|
||||
modes.append("test")
|
||||
return ", ".join(modes or ["release"])
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
env.ProcessProgramDeps()
|
||||
env.ProcessCompileDbToolchainOption()
|
||||
env.ProcessProjectDeps()
|
||||
|
||||
# append into the beginning a main LD script
|
||||
if env.get("LDSCRIPT_PATH") and not any("-Wl,-T" in f for f in env["LINKFLAGS"]):
|
||||
env.Prepend(LINKFLAGS=["-T", env.subst("$LDSCRIPT_PATH")])
|
||||
|
||||
# enable "cyclic reference" for linker
|
||||
if (
|
||||
env.get("LIBS")
|
||||
and env.GetCompilerType() == "gcc"
|
||||
and (env.PioPlatform().is_embedded() or not IS_MACOS)
|
||||
):
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
program = env.Program(env.subst("$PROGPATH"), env["PIOBUILDFILES"])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"checkprogsize",
|
||||
program,
|
||||
env.VerboseAction(env.CheckUploadSize, "Checking size $PIOMAINPROG"),
|
||||
)
|
||||
)
|
||||
|
||||
print("Building in %s mode" % env["BUILD_TYPE"])
|
||||
|
||||
return program
|
||||
|
||||
|
||||
def ProcessProgramDeps(env):
|
||||
def _append_pio_macros():
|
||||
core_version = pepver_to_semver(__version__)
|
||||
env.AppendUnique(
|
||||
CPPDEFINES=[
|
||||
(
|
||||
"PLATFORMIO",
|
||||
int(
|
||||
"{0:02d}{1:02d}{2:02d}".format(
|
||||
core_version.major, core_version.minor, core_version.patch
|
||||
)
|
||||
),
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
_append_pio_macros()
|
||||
|
||||
env.PrintConfiguration()
|
||||
|
||||
# process extra flags from board
|
||||
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
|
||||
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
|
||||
|
||||
# apply user flags
|
||||
env.ProcessFlags(env.get("BUILD_FLAGS"))
|
||||
|
||||
# process framework scripts
|
||||
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
|
||||
|
||||
if "debug" in env["BUILD_TYPE"]:
|
||||
env.ConfigureDebugTarget()
|
||||
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
|
||||
def ProcessCompileDbToolchainOption(env):
|
||||
if "compiledb" not in COMMAND_LINE_TARGETS:
|
||||
return
|
||||
|
||||
# Resolve absolute path of toolchain
|
||||
for cmd in ("CC", "CXX", "AS"):
|
||||
if cmd not in env:
|
||||
continue
|
||||
if os.path.isabs(env[cmd]) or '"' in env[cmd]:
|
||||
continue
|
||||
env[cmd] = where_is_program(env.subst("$%s" % cmd), env.subst("${ENV['PATH']}"))
|
||||
if " " in env[cmd]: # issue #4998: Space in compilator path
|
||||
env[cmd] = f'"{env[cmd]}"'
|
||||
|
||||
if env.get("COMPILATIONDB_INCLUDE_TOOLCHAIN"):
|
||||
print("Warning! `COMPILATIONDB_INCLUDE_TOOLCHAIN` is scoping")
|
||||
for scope, includes in env.DumpIntegrationIncludes().items():
|
||||
if scope in ("toolchain",):
|
||||
env.Append(CPPPATH=includes)
|
||||
|
||||
|
||||
def ProcessProjectDeps(env):
|
||||
plb = env.ConfigureProjectLibBuilder()
|
||||
|
||||
# prepend project libs to the beginning of list
|
||||
env.Prepend(LIBS=plb.build())
|
||||
# prepend extra linker related options from libs
|
||||
env.PrependUnique(
|
||||
**{
|
||||
key: plb.env.get(key)
|
||||
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
|
||||
if plb.env.get(key)
|
||||
}
|
||||
)
|
||||
|
||||
if "test" in env["BUILD_TYPE"]:
|
||||
build_files_before_nums = len(env.get("PIOBUILDFILES", []))
|
||||
plb.env.BuildSources(
|
||||
"$BUILD_TEST_DIR", "$PROJECT_TEST_DIR", "$PIOTEST_SRC_FILTER"
|
||||
)
|
||||
if len(env.get("PIOBUILDFILES", [])) - build_files_before_nums < 1:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your test suites "
|
||||
"to the '%s' folder\n" % env.subst("$PROJECT_TEST_DIR")
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
if "test" not in env["BUILD_TYPE"] or env.GetProjectOption("test_build_src"):
|
||||
plb.env.BuildSources(
|
||||
"$BUILD_SRC_DIR", "$PROJECT_SRC_DIR", env.get("SRC_FILTER")
|
||||
)
|
||||
|
||||
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to the '%s' folder\n" % env.subst("$PROJECT_SRC_DIR")
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
if not isinstance(flags, list):
|
||||
flags = [flags]
|
||||
result = {}
|
||||
for raw in flags:
|
||||
for key, value in env.ParseFlags(str(raw)).items():
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
result[key].extend(value)
|
||||
|
||||
cppdefines = []
|
||||
for item in result["CPPDEFINES"]:
|
||||
if not Util.is_Sequence(item):
|
||||
cppdefines.append(item)
|
||||
continue
|
||||
name, value = item[:2]
|
||||
if '"' in value:
|
||||
value = value.replace('"', '\\"')
|
||||
elif value.isdigit():
|
||||
value = int(value)
|
||||
elif value.replace(".", "", 1).isdigit():
|
||||
value = float(value)
|
||||
cppdefines.append((name, value))
|
||||
result["CPPDEFINES"] = cppdefines
|
||||
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
p = env.subst(p)
|
||||
if os.path.isdir(p):
|
||||
result[k][i] = os.path.abspath(p)
|
||||
|
||||
# fix relative LIBs
|
||||
for i, l in enumerate(result.get("LIBS", [])):
|
||||
if isinstance(l, FS.File):
|
||||
result["LIBS"][i] = os.path.abspath(l.get_path())
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result["CCFLAGS"][i] = (f[0], env.subst(f[1].get_path()))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def ProcessFlags(env, flags): # pylint: disable=too-many-branches
|
||||
if not flags:
|
||||
return
|
||||
env.Append(**env.ParseFlagsExtended(flags))
|
||||
|
||||
# Cancel any previous definition of name, either built in or
|
||||
# provided with a -U option // Issue #191
|
||||
undefines = [
|
||||
u
|
||||
for u in env.get("CCFLAGS", [])
|
||||
if isinstance(u, string_types) and u.startswith("-U")
|
||||
]
|
||||
if undefines:
|
||||
for undef in undefines:
|
||||
env["CCFLAGS"].remove(undef)
|
||||
if undef[2:] in env["CPPDEFINES"]:
|
||||
env["CPPDEFINES"].remove(undef[2:])
|
||||
env.Append(_CPPDEFFLAGS=" %s" % " ".join(undefines))
|
||||
|
||||
|
||||
def ProcessUnFlags(env, flags):
|
||||
if not flags:
|
||||
return
|
||||
parsed = env.ParseFlagsExtended(flags)
|
||||
unflag_scopes = tuple(set(["ASPPFLAGS"] + list(parsed.keys())))
|
||||
for scope in unflag_scopes:
|
||||
for unflags in parsed.values():
|
||||
for unflag in unflags:
|
||||
for current in list(env.get(scope, [])):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
not isinstance(unflag, (tuple, list))
|
||||
and isinstance(current, (tuple, list))
|
||||
and unflag == current[0],
|
||||
]
|
||||
if any(conditions):
|
||||
env[scope].remove(current)
|
||||
|
||||
|
||||
def StringifyMacro(env, value): # pylint: disable=unused-argument
|
||||
return '\\"%s\\"' % value.replace('"', '\\\\\\"')
|
||||
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None, src_exts=None):
|
||||
src_filter = env.subst(src_filter) if src_filter else None
|
||||
src_filter = src_filter or SRC_FILTER_DEFAULT
|
||||
src_exts = src_exts or (SRC_BUILD_EXT + SRC_HEADER_EXT)
|
||||
return fs.match_src_files(env.subst(src_dir), src_filter, src_exts)
|
||||
|
||||
|
||||
def CollectBuildFiles(
|
||||
env, variant_dir, src_dir, src_filter=None, duplicate=False
|
||||
): # pylint: disable=too-many-locals
|
||||
sources = []
|
||||
variants = []
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
if src_dir.endswith(os.sep):
|
||||
src_dir = src_dir[:-1]
|
||||
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter, SRC_BUILD_EXT):
|
||||
_reldir = os.path.dirname(item)
|
||||
_src_dir = os.path.join(src_dir, _reldir) if _reldir else src_dir
|
||||
_var_dir = os.path.join(variant_dir, _reldir) if _reldir else variant_dir
|
||||
|
||||
if _var_dir not in variants:
|
||||
variants.append(_var_dir)
|
||||
env.VariantDir(_var_dir, _src_dir, duplicate)
|
||||
|
||||
sources.append(env.File(os.path.join(_var_dir, os.path.basename(item))))
|
||||
|
||||
middlewares = env.get("__PIO_BUILD_MIDDLEWARES")
|
||||
if not middlewares:
|
||||
return sources
|
||||
|
||||
new_sources = []
|
||||
for node in sources:
|
||||
new_node = node
|
||||
for callback, pattern in middlewares:
|
||||
if pattern and not fnmatch.fnmatch(node.srcnode().get_path(), pattern):
|
||||
continue
|
||||
if callback.__code__.co_argcount == 2:
|
||||
new_node = callback(env, new_node)
|
||||
else:
|
||||
new_node = callback(new_node)
|
||||
if not new_node:
|
||||
break
|
||||
if new_node:
|
||||
new_sources.append(new_node)
|
||||
|
||||
return new_sources
|
||||
|
||||
|
||||
def AddBuildMiddleware(env, callback, pattern=None):
|
||||
env.Append(__PIO_BUILD_MIDDLEWARES=[(callback, pattern)])
|
||||
|
||||
|
||||
def BuildFrameworks(env, frameworks):
|
||||
if not frameworks:
|
||||
return
|
||||
|
||||
if "BOARD" not in env:
|
||||
sys.stderr.write(
|
||||
"Please specify `board` in `platformio.ini` to use "
|
||||
"with '%s' framework\n" % ", ".join(frameworks)
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
supported_frameworks = env.BoardConfig().get("frameworks", [])
|
||||
for name in frameworks:
|
||||
if name == "arduino":
|
||||
# Arduino IDE appends .o to the end of filename
|
||||
Builder.match_splitext = scons_patched_match_splitext
|
||||
if "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.ConvertInoToCpp()
|
||||
|
||||
if name in supported_frameworks:
|
||||
SConscript(env.GetFrameworkScript(name), exports="env")
|
||||
else:
|
||||
sys.stderr.write("Error: This board doesn't support %s framework!\n" % name)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None, nodes=None):
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
nodes = nodes or env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
return env.StaticLibrary(env.subst(variant_dir), nodes)
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
if env.get("PIOMAINPROG"):
|
||||
sys.stderr.write(
|
||||
"Error: The main program is already constructed and the inline "
|
||||
"source files are not allowed. Please use `env.BuildLibrary(...)` "
|
||||
"or PRE-type script instead."
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
DefaultEnvironment().Append(
|
||||
PIOBUILDFILES=[
|
||||
env.Object(node) if isinstance(node, FS.File) else node for node in nodes
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetBuildType)
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ProcessProgramDeps)
|
||||
env.AddMethod(ProcessCompileDbToolchainOption)
|
||||
env.AddMethod(ProcessProjectDeps)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
env.AddMethod(ProcessUnFlags)
|
||||
env.AddMethod(StringifyMacro)
|
||||
env.AddMethod(MatchSourceFiles)
|
||||
env.AddMethod(CollectBuildFiles)
|
||||
env.AddMethod(AddBuildMiddleware)
|
||||
env.AddMethod(BuildFrameworks)
|
||||
env.AddMethod(BuildLibrary)
|
||||
env.AddMethod(BuildSources)
|
||||
return env
|
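AddBuildMiddleware registers a callback that can transform or drop matched source nodes before they are compiled, as implemented in CollectBuildFiles above. A hedged sketch of how an extra script could use it; the define and the file pattern below are examples only, not part of this change:

Import("env")  # available inside a PlatformIO extra script

def add_extra_define(node):
    # Return a new object node with an additional define;
    # returning None instead would exclude the file from the build.
    return env.Object(
        node,
        CPPDEFINES=env["CPPDEFINES"] + [("EXTRA_DEBUG", 1)],
    )

env.AddBuildMiddleware(add_extra_define, "*/src/main.c*")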
50
platformio/builder/tools/piohooks.py
Normal file
@@ -0,0 +1,50 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


def AddActionWrapper(handler):
    def wraps(env, files, action):
        if not isinstance(files, (list, tuple, set)):
            files = [files]
        known_nodes = []
        unknown_files = []
        for item in files:
            nodes = env.arg2nodes(item, env.fs.Entry)
            if nodes and nodes[0].exists():
                known_nodes.extend(nodes)
            else:
                unknown_files.append(item)
        if unknown_files:
            env.Append(**{"_PIO_DELAYED_ACTIONS": [(handler, unknown_files, action)]})
        if known_nodes:
            return handler(known_nodes, action)
        return []

    return wraps


def ProcessDelayedActions(env):
    for func, nodes, action in env.get("_PIO_DELAYED_ACTIONS", []):
        func(nodes, action)


def generate(env):
    env.Replace(**{"_PIO_DELAYED_ACTIONS": []})
    env.AddMethod(AddActionWrapper(env.AddPreAction), "AddPreAction")
    env.AddMethod(AddActionWrapper(env.AddPostAction), "AddPostAction")
    env.AddMethod(ProcessDelayedActions)


def exists(_):
    return True
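AddActionWrapper above lets scripts attach pre/post actions to build nodes that do not exist yet; such requests are parked in _PIO_DELAYED_ACTIONS and replayed by ProcessDelayedActions later in the build. A hedged usage sketch from an extra script (the target name follows the usual $BUILD_DIR/${PROGNAME}.elf convention):

Import("env")

def report_elf(target, source, env):  # standard SCons action signature
    print("Firmware ELF produced:", target[0].get_abspath())

# The ELF node may not exist on the first run; the wrapper defers the
# registration until env.ProcessDelayedActions() is called.
env.AddPostAction("$BUILD_DIR/${PROGNAME}.elf", report_elf)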
@@ -1,194 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from glob import glob
|
||||
from os import environ
|
||||
from os.path import abspath, isfile, join
|
||||
|
||||
from SCons.Defaults import processDefines
|
||||
|
||||
from platformio import util
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
|
||||
|
||||
def _dump_includes(env):
|
||||
includes = []
|
||||
|
||||
for item in env.get("CPPPATH", []):
|
||||
includes.append(env.subst(item))
|
||||
|
||||
# installed libs
|
||||
for lb in env.GetLibBuilders():
|
||||
includes.extend(lb.get_include_dirs())
|
||||
|
||||
# includes from toolchains
|
||||
p = env.PioPlatform()
|
||||
for name in p.get_installed_packages():
|
||||
if p.get_package_type(name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = util.glob_escape(p.get_package_dir(name))
|
||||
toolchain_incglobs = [
|
||||
join(toolchain_dir, "*", "include*"),
|
||||
join(toolchain_dir, "*", "include", "c++", "*"),
|
||||
join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
|
||||
join(toolchain_dir, "lib", "gcc", "*", "*", "include*")
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
includes.extend(glob(g))
|
||||
|
||||
unity_dir = get_core_package_dir("tool-unity")
|
||||
if unity_dir:
|
||||
includes.append(unity_dir)
|
||||
|
||||
includes.extend(
|
||||
[env.subst("$PROJECTINCLUDE_DIR"),
|
||||
env.subst("$PROJECTSRC_DIR")])
|
||||
|
||||
# remove duplicates
|
||||
result = []
|
||||
for item in includes:
|
||||
if item not in result:
|
||||
result.append(abspath(item))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _get_gcc_defines(env):
|
||||
items = []
|
||||
try:
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command(
|
||||
"echo | %s -dM -E -" % env.subst("$CC"), env=sysenv, shell=True)
|
||||
except OSError:
|
||||
return items
|
||||
if result['returncode'] != 0:
|
||||
return items
|
||||
for line in result['out'].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
continue
|
||||
if len(tokens) > 2:
|
||||
items.append("%s=%s" % (tokens[1], tokens[2]))
|
||||
else:
|
||||
items.append(tokens[1])
|
||||
return items
|
||||
|
||||
|
||||
def _dump_defines(env):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in processDefines(env.get("CPPDEFINES", [])):
|
||||
defines.append(env.subst(item).replace('\\', ''))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env['PIOPLATFORM'] == "atmelavr":
|
||||
board_mcu = env.get("BOARD_MCU")
|
||||
if not board_mcu and "BOARD" in env:
|
||||
board_mcu = env.BoardConfig().get("build.mcu")
|
||||
if board_mcu:
|
||||
defines.append(
|
||||
str("__AVR_%s__" % board_mcu.upper().replace(
|
||||
"ATMEGA", "ATmega").replace("ATTINY", "ATtiny")))
|
||||
|
||||
# built-in GCC macros
|
||||
# if env.GetCompilerType() == "gcc":
|
||||
# defines.extend(_get_gcc_defines(env))
|
||||
|
||||
return defines
|
||||
|
||||
|
||||
def _get_svd_path(env):
|
||||
svd_path = env.subst("$DEBUG_SVD_PATH")
|
||||
if svd_path:
|
||||
return abspath(svd_path)
|
||||
|
||||
if "BOARD" not in env:
|
||||
return None
|
||||
try:
|
||||
svd_path = env.BoardConfig().get("debug.svd_path")
|
||||
assert svd_path
|
||||
except (AssertionError, KeyError):
|
||||
return None
|
||||
# custom path to SVD file
|
||||
if isfile(svd_path):
|
||||
return svd_path
|
||||
# default file from ./platform/misc/svd folder
|
||||
p = env.PioPlatform()
|
||||
if isfile(join(p.get_dir(), "misc", "svd", svd_path)):
|
||||
return abspath(join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return None
|
||||
|
||||
|
||||
def DumpIDEData(env):
|
||||
LINTCCOM = "$CFLAGS $CCFLAGS $CPPFLAGS"
|
||||
LINTCXXCOM = "$CXXFLAGS $CCFLAGS $CPPFLAGS"
|
||||
|
||||
data = {
|
||||
"libsource_dirs":
|
||||
[env.subst(l) for l in env.get("LIBSOURCE_DIRS", [])],
|
||||
"defines":
|
||||
_dump_defines(env),
|
||||
"includes":
|
||||
_dump_includes(env),
|
||||
"cc_flags":
|
||||
env.subst(LINTCCOM),
|
||||
"cxx_flags":
|
||||
env.subst(LINTCXXCOM),
|
||||
"cc_path":
|
||||
util.where_is_program(env.subst("$CC"), env.subst("${ENV['PATH']}")),
|
||||
"cxx_path":
|
||||
util.where_is_program(env.subst("$CXX"), env.subst("${ENV['PATH']}")),
|
||||
"gdb_path":
|
||||
util.where_is_program(env.subst("$GDB"), env.subst("${ENV['PATH']}")),
|
||||
"prog_path":
|
||||
env.subst("$PROG_PATH"),
|
||||
"flash_extra_images": [{
|
||||
"offset": item[0],
|
||||
"path": env.subst(item[1])
|
||||
} for item in env.get("FLASH_EXTRA_IMAGES", [])],
|
||||
"svd_path":
|
||||
_get_svd_path(env),
|
||||
"compiler_type":
|
||||
env.GetCompilerType()
|
||||
}
|
||||
|
||||
env_ = env.Clone()
|
||||
# https://github.com/platformio/platformio-atom-ide/issues/34
|
||||
_new_defines = []
|
||||
for item in processDefines(env_.get("CPPDEFINES", [])):
|
||||
item = item.replace('\\"', '"')
|
||||
if " " in item:
|
||||
_new_defines.append(item.replace(" ", "\\\\ "))
|
||||
else:
|
||||
_new_defines.append(item)
|
||||
env_.Replace(CPPDEFINES=_new_defines)
|
||||
|
||||
data.update({
|
||||
"cc_flags": env_.subst(LINTCCOM),
|
||||
"cxx_flags": env_.subst(LINTCXXCOM)
|
||||
})
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(DumpIDEData)
|
||||
return env
|
platformio/builder/tools/pioino.py (new file, 256 lines)
@@ -0,0 +1,256 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import atexit
|
||||
import glob
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio.compat import get_filesystem_encoding, get_locale_encoding
|
||||
|
||||
|
||||
class InoToCPPConverter:
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
([a-z_\d\&]+\*?\s+){1,2} # return type
|
||||
([a-z_\d]+\s*) # name of prototype
|
||||
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
|
||||
)\s*(\{|;) # must end with `{` or `;`
|
||||
""",
|
||||
re.X | re.M | re.I,
|
||||
)
|
||||
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
|
||||
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
|
||||
|
||||
def __init__(self, env):
|
||||
self.env = env
|
||||
self._main_ino = None
|
||||
self._safe_encoding = None
|
||||
|
||||
def read_safe_contents(self, path):
|
||||
error_reported = False
|
||||
for encoding in (
|
||||
"utf-8",
|
||||
None,
|
||||
get_filesystem_encoding(),
|
||||
get_locale_encoding(),
|
||||
"latin-1",
|
||||
):
|
||||
try:
|
||||
with io.open(path, encoding=encoding) as fp:
|
||||
contents = fp.read()
|
||||
self._safe_encoding = encoding
|
||||
return contents
|
||||
except UnicodeDecodeError:
|
||||
if not error_reported:
|
||||
error_reported = True
|
||||
click.secho(
|
||||
"Unicode decode error has occurred, please remove invalid "
|
||||
"(non-ASCII or non-UTF8) characters from %s file or convert it to UTF-8"
|
||||
% path,
|
||||
fg="yellow",
|
||||
err=True,
|
||||
)
|
||||
return ""
|
||||
|
||||
def write_safe_contents(self, path, contents):
|
||||
with io.open(
|
||||
path, "w", encoding=self._safe_encoding, errors="backslashreplace"
|
||||
) as fp:
|
||||
return fp.write(contents)
|
||||
|
||||
def is_main_node(self, contents):
|
||||
return self.DETECTMAIN_RE.search(contents)
|
||||
|
||||
def convert(self, nodes):
|
||||
contents = self.merge(nodes)
|
||||
if not contents:
|
||||
return None
|
||||
return self.process(contents)
|
||||
|
||||
def merge(self, nodes):
|
||||
assert nodes
|
||||
lines = []
|
||||
for node in nodes:
|
||||
contents = self.read_safe_contents(node.get_path())
|
||||
_lines = ['# 1 "%s"' % node.get_path().replace("\\", "/"), contents]
|
||||
if self.is_main_node(contents):
|
||||
lines = _lines + lines
|
||||
self._main_ino = node.get_path()
|
||||
else:
|
||||
lines.extend(_lines)
|
||||
|
||||
if not self._main_ino:
|
||||
self._main_ino = nodes[0].get_path()
|
||||
|
||||
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
|
||||
|
||||
def process(self, contents):
|
||||
out_file = re.sub(r"[\"\'\;]+", "", self._main_ino) + ".cpp"
|
||||
assert self._gcc_preprocess(contents, out_file)
|
||||
contents = self.read_safe_contents(out_file)
|
||||
contents = self._join_multiline_strings(contents)
|
||||
self.write_safe_contents(out_file, self.append_prototypes(contents))
|
||||
return out_file
|
||||
|
||||
def _gcc_preprocess(self, contents, out_file):
|
||||
tmp_path = tempfile.mkstemp()[1]
|
||||
self.write_safe_contents(tmp_path, contents)
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
|
||||
out_file, tmp_path
|
||||
),
|
||||
"Converting " + os.path.basename(out_file[:-4]),
|
||||
)
|
||||
)
|
||||
atexit.register(_delete_file, tmp_path)
|
||||
return os.path.isfile(out_file)
|
||||
|
||||
def _join_multiline_strings(self, contents):
|
||||
if "\\\n" not in contents:
|
||||
return contents
|
||||
newlines = []
|
||||
linenum = 0
|
||||
stropen = False
|
||||
for line in contents.split("\n"):
|
||||
_linenum = self._parse_preproc_line_num(line)
|
||||
if _linenum is not None:
|
||||
linenum = _linenum
|
||||
else:
|
||||
linenum += 1
|
||||
|
||||
if line.endswith("\\"):
|
||||
if line.startswith('"'):
|
||||
stropen = True
|
||||
newlines.append(line[:-1])
|
||||
continue
|
||||
if stropen:
|
||||
newlines[len(newlines) - 1] += line[:-1]
|
||||
continue
|
||||
elif stropen and line.endswith(('",', '";')):
|
||||
newlines[len(newlines) - 1] += line
|
||||
stropen = False
|
||||
newlines.append(
|
||||
'#line %d "%s"' % (linenum, self._main_ino.replace("\\", "/"))
|
||||
)
|
||||
continue
|
||||
|
||||
newlines.append(line)
|
||||
|
||||
return "\n".join(newlines)
|
||||
|
||||
@staticmethod
|
||||
def _parse_preproc_line_num(line):
|
||||
if not line.startswith("#"):
|
||||
return None
|
||||
tokens = line.split(" ", 3)
|
||||
if len(tokens) > 2 and tokens[1].isdigit():
|
||||
return int(tokens[1])
|
||||
return None
|
||||
|
||||
def _parse_prototypes(self, contents):
|
||||
prototypes = []
|
||||
reserved_keywords = set(["if", "else", "while"])
|
||||
for match in self.PROTOTYPE_RE.finditer(contents):
|
||||
if (
|
||||
set([match.group(2).strip(), match.group(3).strip()])
|
||||
& reserved_keywords
|
||||
):
|
||||
continue
|
||||
prototypes.append(match)
|
||||
return prototypes
|
||||
|
||||
def _get_total_lines(self, contents):
|
||||
total = 0
|
||||
if contents.endswith("\n"):
|
||||
contents = contents[:-1]
|
||||
for line in contents.split("\n")[::-1]:
|
||||
linenum = self._parse_preproc_line_num(line)
|
||||
if linenum is not None:
|
||||
return total + linenum
|
||||
total += 1
|
||||
return total
|
||||
|
||||
def append_prototypes(self, contents):
|
||||
prototypes = self._parse_prototypes(contents) or []
|
||||
|
||||
# skip already declared prototypes
|
||||
declared = set(m.group(1).strip() for m in prototypes if m.group(4) == ";")
|
||||
prototypes = [m for m in prototypes if m.group(1).strip() not in declared]
|
||||
|
||||
if not prototypes:
|
||||
return contents
|
||||
|
||||
prototype_names = set(m.group(3).strip() for m in prototypes)
|
||||
split_pos = prototypes[0].start()
|
||||
match_ptrs = re.search(
|
||||
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
|
||||
contents[:split_pos],
|
||||
re.M,
|
||||
)
|
||||
if match_ptrs:
|
||||
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
|
||||
|
||||
result = []
|
||||
result.append(contents[:split_pos].strip())
|
||||
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
|
||||
result.append(
|
||||
'#line %d "%s"'
|
||||
% (
|
||||
self._get_total_lines(contents[:split_pos]),
|
||||
self._main_ino.replace("\\", "/"),
|
||||
)
|
||||
)
|
||||
result.append(contents[split_pos:].strip())
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def FindInoNodes(env):
|
||||
src_dir = glob.escape(env.subst("$PROJECT_SRC_DIR"))
|
||||
return env.Glob(os.path.join(src_dir, "*.ino")) + env.Glob(
|
||||
os.path.join(src_dir, "*.pde")
|
||||
)
|
||||
|
||||
|
||||
def ConvertInoToCpp(env):
|
||||
ino_nodes = env.FindInoNodes()
|
||||
if not ino_nodes:
|
||||
return
|
||||
c = InoToCPPConverter(env)
|
||||
out_file = c.convert(ino_nodes)
|
||||
|
||||
atexit.register(_delete_file, out_file)
|
||||
|
||||
|
||||
def _delete_file(path):
|
||||
try:
|
||||
if os.path.isfile(path):
|
||||
os.remove(path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(FindInoNodes)
|
||||
env.AddMethod(ConvertInoToCpp)
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
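The prototype detection in InoToCPPConverter can be exercised on its own; a small standalone sketch (the example .ino contents are illustrative):

import re

PROTOTYPE_RE = re.compile(
    r"""^(
    (?:template\<.*\>\s*)?      # template
    ([a-z_\d\&]+\*?\s+){1,2}    # return type
    ([a-z_\d]+\s*)              # name of prototype
    \([a-z_,\.\*\&\[\]\s\d]*\)  # arguments
    )\s*(\{|;)                  # must end with `{` or `;`
    """,
    re.X | re.M | re.I,
)

sketch = """
void setup() { blink(3); }
void loop() {}
void blink(int times) { delay(100 * times); }
"""
print([m.group(1).strip() for m in PROTOTYPE_RE.finditer(sketch)])
# -> ['void setup()', 'void loop()', 'void blink(int times)']

append_prototypes() then injects these declarations ahead of the first definition so the generated .cpp compiles without manual forward declarations.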
platformio/builder/tools/piointegration.py (new file, 188 lines)
@@ -0,0 +1,188 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import glob
|
||||
import os
|
||||
|
||||
import SCons.Defaults # pylint: disable=import-error
|
||||
import SCons.Subst # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
|
||||
from platformio.proc import exec_command, where_is_program
|
||||
|
||||
|
||||
def IsIntegrationDump(_):
|
||||
return set(["__idedata", "idedata"]) & set(COMMAND_LINE_TARGETS)
|
||||
|
||||
|
||||
def DumpIntegrationIncludes(env):
|
||||
result = dict(build=[], compatlib=[], toolchain=[])
|
||||
|
||||
# `env`(project) CPPPATH
|
||||
result["build"].extend(
|
||||
[os.path.abspath(env.subst(item)) for item in env.get("CPPPATH", [])]
|
||||
)
|
||||
|
||||
# installed libs
|
||||
for lb in env.GetLibBuilders():
|
||||
result["compatlib"].extend(
|
||||
[os.path.abspath(inc) for inc in lb.get_include_dirs()]
|
||||
)
|
||||
|
||||
# includes from toolchains
|
||||
p = env.PioPlatform()
|
||||
for pkg in p.get_installed_packages(with_optional=False):
|
||||
if p.get_package_type(pkg.metadata.name) != "toolchain":
|
||||
continue
|
||||
toolchain_dir = glob.escape(pkg.path)
|
||||
toolchain_incglobs = [
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*"),
|
||||
os.path.join(toolchain_dir, "*", "include", "c++", "*", "*-*-*"),
|
||||
os.path.join(toolchain_dir, "lib", "gcc", "*", "*", "include*"),
|
||||
os.path.join(toolchain_dir, "*", "include*"),
|
||||
]
|
||||
for g in toolchain_incglobs:
|
||||
result["toolchain"].extend([os.path.abspath(inc) for inc in glob.glob(g)])
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_gcc_defines(env):
|
||||
items = []
|
||||
try:
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command(
|
||||
"echo | %s -dM -E -" % env.subst("$CC"), env=sysenv, shell=True
|
||||
)
|
||||
except OSError:
|
||||
return items
|
||||
if result["returncode"] != 0:
|
||||
return items
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
continue
|
||||
if len(tokens) > 2:
|
||||
items.append("%s=%s" % (tokens[1], tokens[2]))
|
||||
else:
|
||||
items.append(tokens[1])
|
||||
return items
|
||||
|
||||
|
||||
def dump_defines(env):
|
||||
defines = []
|
||||
# global symbols
|
||||
for item in SCons.Defaults.processDefines(env.get("CPPDEFINES", [])):
|
||||
item = item.strip()
|
||||
if item:
|
||||
defines.append(env.subst(item).replace('\\"', '"'))
|
||||
|
||||
# special symbol for Atmel AVR MCU
|
||||
if env["PIOPLATFORM"] == "atmelavr":
|
||||
board_mcu = env.get("BOARD_MCU")
|
||||
if not board_mcu and "BOARD" in env:
|
||||
board_mcu = env.BoardConfig().get("build.mcu")
|
||||
if board_mcu:
|
||||
defines.append(
|
||||
str(
|
||||
"__AVR_%s__"
|
||||
% board_mcu.upper()
|
||||
.replace("ATMEGA", "ATmega")
|
||||
.replace("ATTINY", "ATtiny")
|
||||
)
|
||||
)
|
||||
|
||||
# built-in GCC macros
|
||||
# if env.GetCompilerType() == "gcc":
|
||||
# defines.extend(get_gcc_defines(env))
|
||||
|
||||
return defines
|
||||
|
||||
|
||||
def dump_svd_path(env):
|
||||
svd_path = env.GetProjectOption("debug_svd_path")
|
||||
if svd_path:
|
||||
return os.path.abspath(svd_path)
|
||||
|
||||
if "BOARD" not in env:
|
||||
return None
|
||||
try:
|
||||
svd_path = env.BoardConfig().get("debug.svd_path")
|
||||
assert svd_path
|
||||
except (AssertionError, KeyError):
|
||||
return None
|
||||
# custom path to SVD file
|
||||
if os.path.isfile(svd_path):
|
||||
return svd_path
|
||||
# default file from ./platform/misc/svd folder
|
||||
p = env.PioPlatform()
|
||||
if os.path.isfile(os.path.join(p.get_dir(), "misc", "svd", svd_path)):
|
||||
return os.path.abspath(os.path.join(p.get_dir(), "misc", "svd", svd_path))
|
||||
return None
|
||||
|
||||
|
||||
def _split_flags_string(env, s):
|
||||
args = env.subst_list(s, SCons.Subst.SUBST_CMD)[0]
|
||||
return [str(arg) for arg in args]
|
||||
|
||||
|
||||
def DumpIntegrationData(*args):
|
||||
projenv, globalenv = args[0:2] # pylint: disable=unbalanced-tuple-unpacking
|
||||
data = {
|
||||
"build_type": globalenv.GetBuildType(),
|
||||
"env_name": globalenv["PIOENV"],
|
||||
"libsource_dirs": [
|
||||
globalenv.subst(item) for item in globalenv.GetLibSourceDirs()
|
||||
],
|
||||
"defines": dump_defines(projenv),
|
||||
"includes": projenv.DumpIntegrationIncludes(),
|
||||
"cc_flags": _split_flags_string(projenv, "$CFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cxx_flags": _split_flags_string(projenv, "$CXXFLAGS $CCFLAGS $CPPFLAGS"),
|
||||
"cc_path": where_is_program(
|
||||
globalenv.subst("$CC"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"cxx_path": where_is_program(
|
||||
globalenv.subst("$CXX"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"gdb_path": where_is_program(
|
||||
globalenv.subst("$GDB"), globalenv.subst("${ENV['PATH']}")
|
||||
),
|
||||
"prog_path": globalenv.subst("$PROGPATH"),
|
||||
"svd_path": dump_svd_path(globalenv),
|
||||
"compiler_type": globalenv.GetCompilerType(),
|
||||
"targets": globalenv.DumpTargets(),
|
||||
"extra": dict(
|
||||
flash_images=[
|
||||
{"offset": item[0], "path": globalenv.subst(item[1])}
|
||||
for item in globalenv.get("FLASH_EXTRA_IMAGES", [])
|
||||
]
|
||||
),
|
||||
}
|
||||
for key in ("IDE_EXTRA_DATA", "INTEGRATION_EXTRA_DATA"):
|
||||
data["extra"].update(globalenv.get(key, {}))
|
||||
return data
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env["IDE_EXTRA_DATA"] = {} # legacy support
|
||||
env["INTEGRATION_EXTRA_DATA"] = {}
|
||||
env.AddMethod(IsIntegrationDump)
|
||||
env.AddMethod(DumpIntegrationIncludes)
|
||||
env.AddMethod(DumpIntegrationData)
|
||||
return env
|
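The "extra" section of the integration dump is extensible: generate() seeds INTEGRATION_EXTRA_DATA (and the legacy IDE_EXTRA_DATA), and DumpIntegrationData merges whatever those dicts contain. A hedged sketch from an extra script, assuming it runs against the environment the dump reads from and using a purely illustrative key:

Import("env")

env.get("INTEGRATION_EXTRA_DATA", {}).update(
    {"custom_note": "exported by an extra script"}  # illustrative key/value
)

The merged result shows up under "extra" when the idedata target is built.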
File diff suppressed because it is too large
platformio/builder/tools/piomaxlen.py (new file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
|
||||
from SCons.Platform import TempFileMunge # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Subst import quote_spaces # pylint: disable=import-error
|
||||
|
||||
from platformio.compat import IS_WINDOWS, hashlib_encode_data
|
||||
|
||||
# There are the following limits depending on the platform:
|
||||
# - Windows = 8191
|
||||
# - Unix = 131072
|
||||
# We need ~512 characters for compiler and temporary file paths
|
||||
MAX_LINE_LENGTH = (8191 if IS_WINDOWS else 131072) - 512
|
||||
|
||||
WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")
|
||||
|
||||
|
||||
def tempfile_arg_esc_func(arg):
|
||||
arg = quote_spaces(arg)
|
||||
if not IS_WINDOWS:
|
||||
return arg
|
||||
# GCC requires double Windows slashes, let's use UNIX separator
|
||||
return WINPATHSEP_RE.sub(r"/\1", arg)
|
||||
|
||||
|
||||
def long_sources_hook(env, sources):
|
||||
_sources = str(sources).replace("\\", "/")
|
||||
if len(str(_sources)) < MAX_LINE_LENGTH:
|
||||
return sources
|
||||
|
||||
# fix space in paths
|
||||
data = []
|
||||
for line in _sources.split(".o "):
|
||||
line = line.strip()
|
||||
if not line.endswith(".o"):
|
||||
line += ".o"
|
||||
data.append('"%s"' % line)
|
||||
|
||||
return '@"%s"' % _file_long_data(env, " ".join(data))
|
||||
|
||||
|
||||
def _file_long_data(env, data):
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not os.path.isdir(build_dir):
|
||||
os.makedirs(build_dir)
|
||||
tmp_file = os.path.join(
|
||||
build_dir, "longcmd-%s" % hashlib.md5(hashlib_encode_data(data)).hexdigest()
|
||||
)
|
||||
if os.path.isfile(tmp_file):
|
||||
return tmp_file
|
||||
with open(tmp_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(data)
|
||||
return tmp_file
|
||||
|
||||
|
||||
def exists(env):
|
||||
return "compiledb" not in COMMAND_LINE_TARGETS and not env.IsIntegrationDump()
|
||||
|
||||
|
||||
def generate(env):
|
||||
if not exists(env):
|
||||
return env
|
||||
kwargs = dict(
|
||||
_long_sources_hook=long_sources_hook,
|
||||
TEMPFILE=TempFileMunge,
|
||||
MAXLINELENGTH=MAX_LINE_LENGTH,
|
||||
TEMPFILEARGESCFUNC=tempfile_arg_esc_func,
|
||||
TEMPFILESUFFIX=".tmp",
|
||||
TEMPFILEDIR="$BUILD_DIR",
|
||||
)
|
||||
|
||||
for name in ("LINKCOM", "ASCOM", "ASPPCOM", "CCCOM", "CXXCOM"):
|
||||
kwargs[name] = "${TEMPFILE('%s','$%sSTR')}" % (env.get(name), name)
|
||||
|
||||
kwargs["ARCOM"] = env.get("ARCOM", "").replace(
|
||||
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}"
|
||||
)
|
||||
env.Replace(**kwargs)
|
||||
|
||||
return env
|
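The escaping helper above can be tried standalone; a minimal sketch of the substitution it applies to Windows paths before they are written into a GCC response file (quoting itself is handled by quote_spaces):

import re

WINPATHSEP_RE = re.compile(r"\\([^\"'\\]|$)")

print(WINPATHSEP_RE.sub(r"/\1", r"C:\project\src\main.cpp"))
# -> C:/project/src/main.cpp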
@@ -12,228 +12,42 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import atexit
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
from os import environ, remove, walk
|
||||
from os.path import basename, isdir, isfile, join, realpath, relpath, sep
|
||||
from tempfile import mkstemp
|
||||
|
||||
from SCons.Action import Action
|
||||
from SCons.Script import ARGUMENTS
|
||||
|
||||
from platformio import util
|
||||
from platformio.managers.core import get_core_package_dir
|
||||
|
||||
|
||||
class InoToCPPConverter(object):
|
||||
|
||||
PROTOTYPE_RE = re.compile(
|
||||
r"""^(
|
||||
(?:template\<.*\>\s*)? # template
|
||||
([a-z_\d\&]+\*?\s+){1,2} # return type
|
||||
([a-z_\d]+\s*) # name of prototype
|
||||
\([a-z_,\.\*\&\[\]\s\d]*\) # arguments
|
||||
)\s*(\{|;) # must end with `{` or `;`
|
||||
""", re.X | re.M | re.I)
|
||||
DETECTMAIN_RE = re.compile(r"void\s+(setup|loop)\s*\(", re.M | re.I)
|
||||
PROTOPTRS_TPLRE = r"\([^&\(]*&(%s)[^\)]*\)"
|
||||
|
||||
def __init__(self, env):
|
||||
self.env = env
|
||||
self._main_ino = None
|
||||
|
||||
def is_main_node(self, contents):
|
||||
return self.DETECTMAIN_RE.search(contents)
|
||||
|
||||
def convert(self, nodes):
|
||||
contents = self.merge(nodes)
|
||||
if not contents:
|
||||
return None
|
||||
return self.process(contents)
|
||||
|
||||
def merge(self, nodes):
|
||||
assert nodes
|
||||
lines = []
|
||||
for node in nodes:
|
||||
contents = node.get_text_contents()
|
||||
_lines = [
|
||||
'# 1 "%s"' % node.get_path().replace("\\", "/"), contents
|
||||
]
|
||||
if self.is_main_node(contents):
|
||||
lines = _lines + lines
|
||||
self._main_ino = node.get_path()
|
||||
else:
|
||||
lines.extend(_lines)
|
||||
|
||||
if not self._main_ino:
|
||||
self._main_ino = nodes[0].get_path()
|
||||
|
||||
return "\n".join(["#include <Arduino.h>"] + lines) if lines else None
|
||||
|
||||
def process(self, contents):
|
||||
out_file = self._main_ino + ".cpp"
|
||||
assert self._gcc_preprocess(contents, out_file)
|
||||
with open(out_file) as fp:
|
||||
contents = fp.read()
|
||||
contents = self._join_multiline_strings(contents)
|
||||
with open(out_file, "w") as fp:
|
||||
fp.write(self.append_prototypes(contents))
|
||||
return out_file
|
||||
|
||||
def _gcc_preprocess(self, contents, out_file):
|
||||
tmp_path = mkstemp()[1]
|
||||
with open(tmp_path, "w") as fp:
|
||||
fp.write(contents)
|
||||
self.env.Execute(
|
||||
self.env.VerboseAction(
|
||||
'$CXX -o "{0}" -x c++ -fpreprocessed -dD -E "{1}"'.format(
|
||||
out_file, tmp_path),
|
||||
"Converting " + basename(out_file[:-4])))
|
||||
atexit.register(_delete_file, tmp_path)
|
||||
return isfile(out_file)
|
||||
|
||||
def _join_multiline_strings(self, contents):
|
||||
if "\\\n" not in contents:
|
||||
return contents
|
||||
newlines = []
|
||||
linenum = 0
|
||||
stropen = False
|
||||
for line in contents.split("\n"):
|
||||
_linenum = self._parse_preproc_line_num(line)
|
||||
if _linenum is not None:
|
||||
linenum = _linenum
|
||||
else:
|
||||
linenum += 1
|
||||
|
||||
if line.endswith("\\"):
|
||||
if line.startswith('"'):
|
||||
stropen = True
|
||||
newlines.append(line[:-1])
|
||||
continue
|
||||
elif stropen:
|
||||
newlines[len(newlines) - 1] += line[:-1]
|
||||
continue
|
||||
elif stropen and line.endswith(('",', '";')):
|
||||
newlines[len(newlines) - 1] += line
|
||||
stropen = False
|
||||
newlines.append('#line %d "%s"' %
|
||||
(linenum, self._main_ino.replace("\\", "/")))
|
||||
continue
|
||||
|
||||
newlines.append(line)
|
||||
|
||||
return "\n".join(newlines)
|
||||
|
||||
@staticmethod
|
||||
def _parse_preproc_line_num(line):
|
||||
if not line.startswith("#"):
|
||||
return None
|
||||
tokens = line.split(" ", 3)
|
||||
if len(tokens) > 2 and tokens[1].isdigit():
|
||||
return int(tokens[1])
|
||||
return None
|
||||
|
||||
def _parse_prototypes(self, contents):
|
||||
prototypes = []
|
||||
reserved_keywords = set(["if", "else", "while"])
|
||||
for match in self.PROTOTYPE_RE.finditer(contents):
|
||||
if (set([match.group(2).strip(),
|
||||
match.group(3).strip()]) & reserved_keywords):
|
||||
continue
|
||||
prototypes.append(match)
|
||||
return prototypes
|
||||
|
||||
def _get_total_lines(self, contents):
|
||||
total = 0
|
||||
if contents.endswith("\n"):
|
||||
contents = contents[:-1]
|
||||
for line in contents.split("\n")[::-1]:
|
||||
linenum = self._parse_preproc_line_num(line)
|
||||
if linenum is not None:
|
||||
return total + linenum
|
||||
total += 1
|
||||
return total
|
||||
|
||||
def append_prototypes(self, contents):
|
||||
prototypes = self._parse_prototypes(contents)
|
||||
if not prototypes:
|
||||
return contents
|
||||
|
||||
# skip already declared prototypes
|
||||
declared = set(
|
||||
m.group(1).strip() for m in prototypes if m.group(4) == ";")
|
||||
prototypes = [
|
||||
m for m in prototypes if m.group(1).strip() not in declared
|
||||
]
|
||||
|
||||
prototype_names = set(m.group(3).strip() for m in prototypes)
|
||||
split_pos = prototypes[0].start()
|
||||
match_ptrs = re.search(
|
||||
self.PROTOPTRS_TPLRE % ("|".join(prototype_names)),
|
||||
contents[:split_pos], re.M)
|
||||
if match_ptrs:
|
||||
split_pos = contents.rfind("\n", 0, match_ptrs.start()) + 1
|
||||
|
||||
result = []
|
||||
result.append(contents[:split_pos].strip())
|
||||
result.append("%s;" % ";\n".join([m.group(1) for m in prototypes]))
|
||||
result.append('#line %d "%s"' % (self._get_total_lines(
|
||||
contents[:split_pos]), self._main_ino.replace("\\", "/")))
|
||||
result.append(contents[split_pos:].strip())
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def ConvertInoToCpp(env):
|
||||
src_dir = util.glob_escape(env.subst("$PROJECTSRC_DIR"))
|
||||
ino_nodes = (
|
||||
env.Glob(join(src_dir, "*.ino")) + env.Glob(join(src_dir, "*.pde")))
|
||||
if not ino_nodes:
|
||||
return
|
||||
c = InoToCPPConverter(env)
|
||||
out_file = c.convert(ino_nodes)
|
||||
|
||||
atexit.register(_delete_file, out_file)
|
||||
|
||||
|
||||
def _delete_file(path):
|
||||
try:
|
||||
if isfile(path):
|
||||
remove(path)
|
||||
except: # pylint: disable=bare-except
|
||||
pass
|
||||
from platformio import fs, util
|
||||
from platformio.proc import exec_command
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def _get_compiler_type(env):
|
||||
def GetCompilerType(env): # pylint: disable=too-many-return-statements
|
||||
CC = env.subst("$CC")
|
||||
if CC.endswith("-gcc"):
|
||||
return "gcc"
|
||||
if os.path.basename(CC) == "clang":
|
||||
return "clang"
|
||||
try:
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command([env.subst("$CC"), "-v"], env=sysenv)
|
||||
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command([CC, "-v"], env=sysenv)
|
||||
except OSError:
|
||||
return None
|
||||
if result['returncode'] != 0:
|
||||
if result["returncode"] != 0:
|
||||
return None
|
||||
output = "".join([result['out'], result['err']]).lower()
|
||||
if "clang" in output and "LLVM" in output:
|
||||
output = "".join([result["out"], result["err"]]).lower()
|
||||
if "clang version" in output:
|
||||
return "clang"
|
||||
elif "gcc" in output:
|
||||
if "gcc" in output:
|
||||
return "gcc"
|
||||
return None
|
||||
|
||||
|
||||
def GetCompilerType(env):
|
||||
return _get_compiler_type(env)
|
||||
|
||||
|
||||
def GetActualLDScript(env):
|
||||
|
||||
def _lookup_in_ldpath(script):
|
||||
for d in env.get("LIBPATH", []):
|
||||
path = join(env.subst(d), script)
|
||||
if isfile(path):
|
||||
path = os.path.join(env.subst(d), script)
|
||||
if os.path.isfile(path):
|
||||
return path
|
||||
return None
|
||||
|
||||
@@ -244,7 +58,7 @@ def GetActualLDScript(env):
|
||||
if f == "-T":
|
||||
script_in_next = True
|
||||
continue
|
||||
elif script_in_next:
|
||||
if script_in_next:
|
||||
script_in_next = False
|
||||
raw_script = f
|
||||
elif f.startswith("-Wl,-T"):
|
||||
@@ -252,7 +66,7 @@ def GetActualLDScript(env):
|
||||
else:
|
||||
continue
|
||||
script = env.subst(raw_script.replace('"', "").strip())
|
||||
if isfile(script):
|
||||
if os.path.isfile(script):
|
||||
return script
|
||||
path = _lookup_in_ldpath(script)
|
||||
if path:
|
||||
@@ -260,12 +74,13 @@
|
||||
|
||||
if script:
|
||||
sys.stderr.write(
|
||||
"Error: Could not find '%s' LD script in LDPATH '%s'\n" %
|
||||
(script, env.subst("$LIBPATH")))
|
||||
"Error: Could not find '%s' LD script in LDPATH '%s'\n"
|
||||
% (script, env.subst("$LIBPATH"))
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
if not script and "LDSCRIPT_PATH" in env:
|
||||
path = _lookup_in_ldpath(env['LDSCRIPT_PATH'])
|
||||
path = _lookup_in_ldpath(env["LDSCRIPT_PATH"])
|
||||
if path:
|
||||
return path
|
||||
|
||||
@@ -273,76 +88,66 @@
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def VerboseAction(_, act, actstr):
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
return act
|
||||
return Action(act, actstr)
|
||||
def ConfigureDebugTarget(env):
|
||||
def _cleanup_debug_flags(scope):
|
||||
if scope not in env:
|
||||
return
|
||||
unflags = ["-Os", "-g"]
|
||||
for level in [0, 1, 2, 3]:
|
||||
for flag in ("O", "g", "ggdb"):
|
||||
unflags.append("-%s%d" % (flag, level))
|
||||
env[scope] = [f for f in env.get(scope, []) if f not in unflags]
|
||||
|
||||
env.Append(CPPDEFINES=["__PLATFORMIO_BUILD_DEBUG__"])
|
||||
|
||||
def PioClean(env, clean_dir):
|
||||
if not isdir(clean_dir):
|
||||
print("Build environment is clean")
|
||||
env.Exit(0)
|
||||
for root, _, files in walk(clean_dir):
|
||||
for file_ in files:
|
||||
remove(join(root, file_))
|
||||
print("Removed %s" % relpath(join(root, file_)))
|
||||
print("Done cleaning")
|
||||
util.rmtree_(clean_dir)
|
||||
env.Exit(0)
|
||||
for scope in ("ASFLAGS", "CCFLAGS", "LINKFLAGS"):
|
||||
_cleanup_debug_flags(scope)
|
||||
|
||||
debug_flags = env.ParseFlags(
|
||||
env.get("PIODEBUGFLAGS")
|
||||
if env.get("PIODEBUGFLAGS")
|
||||
and not env.GetProjectOptions(as_dict=True).get("debug_build_flags")
|
||||
else env.GetProjectOption("debug_build_flags")
|
||||
)
|
||||
|
||||
def ProcessDebug(env):
|
||||
if not env.subst("$PIODEBUGFLAGS"):
|
||||
env.Replace(PIODEBUGFLAGS=["-Og", "-g3", "-ggdb3"])
|
||||
env.Append(
|
||||
PIODEBUGFLAGS=["-D__PLATFORMIO_DEBUG__"],
|
||||
BUILD_FLAGS=env.get("PIODEBUGFLAGS", []))
|
||||
unflags = ["-Os"]
|
||||
for level in [0, 1, 2]:
|
||||
for flag in ("O", "g", "ggdb"):
|
||||
unflags.append("-%s%d" % (flag, level))
|
||||
env.Append(BUILD_UNFLAGS=unflags)
|
||||
env.MergeFlags(debug_flags)
|
||||
optimization_flags = [
|
||||
f for f in debug_flags.get("CCFLAGS", []) if f.startswith(("-O", "-g"))
|
||||
]
|
||||
|
||||
|
||||
def ProcessTest(env):
|
||||
env.Append(
|
||||
CPPDEFINES=["UNIT_TEST", "UNITY_INCLUDE_CONFIG_H"],
|
||||
CPPPATH=[join("$BUILD_DIR", "UnityTestLib")])
|
||||
unitylib = env.BuildLibrary(
|
||||
join("$BUILD_DIR", "UnityTestLib"), get_core_package_dir("tool-unity"))
|
||||
env.Prepend(LIBS=[unitylib])
|
||||
|
||||
src_filter = ["+<*.cpp>", "+<*.c>"]
|
||||
if "PIOTEST" in env:
|
||||
src_filter.append("+<%s%s>" % (env['PIOTEST'], sep))
|
||||
env.Replace(PIOTEST_SRC_FILTER=src_filter)
|
||||
if optimization_flags:
|
||||
env.AppendUnique(
|
||||
ASFLAGS=[
|
||||
# skip -O flags for assembler
|
||||
f
|
||||
for f in optimization_flags
|
||||
if f.startswith("-g")
|
||||
],
|
||||
LINKFLAGS=optimization_flags,
|
||||
)
|
||||
|
||||
|
||||
def GetExtraScripts(env, scope):
|
||||
items = []
|
||||
for item in env.get("EXTRA_SCRIPTS", []):
|
||||
for item in env.GetProjectOption("extra_scripts", []):
|
||||
if scope == "post" and ":" not in item:
|
||||
items.append(item)
|
||||
elif item.startswith("%s:" % scope):
|
||||
items.append(item[len(scope) + 1:])
|
||||
items.append(item[len(scope) + 1 :])
|
||||
if not items:
|
||||
return items
|
||||
with util.cd(env.subst("$PROJECT_DIR")):
|
||||
return [realpath(item) for item in items]
|
||||
with fs.cd(env.subst("$PROJECT_DIR")):
|
||||
return [os.path.abspath(env.subst(item)) for item in items]
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(GetCompilerType)
|
||||
env.AddMethod(GetActualLDScript)
|
||||
env.AddMethod(ConfigureDebugTarget)
|
||||
env.AddMethod(GetExtraScripts)
|
||||
# backward-compatibility with Zephyr build script
|
||||
env.AddMethod(ConfigureDebugTarget, "ConfigureDebugFlags")
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(ConvertInoToCpp)
|
||||
env.AddMethod(GetCompilerType)
|
||||
env.AddMethod(GetActualLDScript)
|
||||
env.AddMethod(VerboseAction)
|
||||
env.AddMethod(PioClean)
|
||||
env.AddMethod(ProcessDebug)
|
||||
env.AddMethod(ProcessTest)
|
||||
env.AddMethod(GetExtraScripts)
|
||||
return env
|
||||
|
@@ -12,171 +12,230 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import base64
|
||||
import os
|
||||
import sys
|
||||
from os.path import isdir, isfile, join
|
||||
|
||||
from SCons.Script import COMMAND_LINE_TARGETS
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from SCons.Script import COMMAND_LINE_TARGETS # pylint: disable=import-error
|
||||
from SCons.Script import DefaultEnvironment # pylint: disable=import-error
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.managers.platform import PlatformFactory
|
||||
from platformio import fs, util
|
||||
from platformio.compat import IS_MACOS, IS_WINDOWS
|
||||
from platformio.package.meta import PackageItem
|
||||
from platformio.package.version import get_original_version
|
||||
from platformio.platform.exception import UnknownBoard
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
from platformio.project.config import ProjectOptions
|
||||
|
||||
# pylint: disable=too-many-branches, too-many-locals
|
||||
|
||||
|
||||
@util.memoized()
|
||||
def initPioPlatform(name):
|
||||
return PlatformFactory.newPlatform(name)
|
||||
def _PioPlatform():
|
||||
env = DefaultEnvironment()
|
||||
return PlatformFactory.from_env(env["PIOENV"], targets=COMMAND_LINE_TARGETS)
|
||||
|
||||
|
||||
def PioPlatform(env):
|
||||
variables = {}
|
||||
for name in env['PIOVARIABLES']:
|
||||
if name in env:
|
||||
variables[name.lower()] = env[name]
|
||||
p = initPioPlatform(env['PLATFORM_MANIFEST'])
|
||||
p.configure_default_packages(variables, COMMAND_LINE_TARGETS)
|
||||
return p
|
||||
def PioPlatform(_):
|
||||
return _PioPlatform()
|
||||
|
||||
|
||||
def BoardConfig(env, board=None):
|
||||
p = initPioPlatform(env['PLATFORM_MANIFEST'])
|
||||
try:
|
||||
board = board or env.get("BOARD")
|
||||
assert board, "BoardConfig: Board is not defined"
|
||||
config = p.board_config(board)
|
||||
except (AssertionError, exception.UnknownBoard) as e:
|
||||
sys.stderr.write("Error: %s\n" % str(e))
|
||||
env.Exit(1)
|
||||
return config
|
||||
with fs.cd(env.subst("$PROJECT_DIR")):
|
||||
try:
|
||||
p = env.PioPlatform()
|
||||
board = board or env.get("BOARD")
|
||||
assert board, "BoardConfig: Board is not defined"
|
||||
return p.board_config(board)
|
||||
except (AssertionError, UnknownBoard) as exc:
|
||||
sys.stderr.write("Error: %s\n" % str(exc))
|
||||
env.Exit(1)
|
||||
return None
|
||||
|
||||
|
||||
def GetFrameworkScript(env, framework):
|
||||
p = env.PioPlatform()
|
||||
assert p.frameworks and framework in p.frameworks
|
||||
script_path = env.subst(p.frameworks[framework]['script'])
|
||||
if not isfile(script_path):
|
||||
script_path = join(p.get_dir(), script_path)
|
||||
script_path = env.subst(p.frameworks[framework]["script"])
|
||||
if not os.path.isfile(script_path):
|
||||
script_path = os.path.join(p.get_dir(), script_path)
|
||||
return script_path
|
||||
|
||||
|
||||
def LoadPioPlatform(env, variables):
|
||||
def LoadPioPlatform(env):
|
||||
p = env.PioPlatform()
|
||||
installed_packages = p.get_installed_packages()
|
||||
|
||||
# Ensure real platform name
|
||||
env['PIOPLATFORM'] = p.name
|
||||
env["PIOPLATFORM"] = p.name
|
||||
|
||||
# Add toolchains and uploaders to $PATH and $*_LIBRARY_PATH
|
||||
systype = util.get_systype()
|
||||
for name in installed_packages:
|
||||
type_ = p.get_package_type(name)
|
||||
for pkg in p.get_installed_packages():
|
||||
type_ = p.get_package_type(pkg.metadata.name)
|
||||
if type_ not in ("toolchain", "uploader", "debugger"):
|
||||
continue
|
||||
pkg_dir = p.get_package_dir(name)
|
||||
env.PrependENVPath(
|
||||
"PATH",
|
||||
join(pkg_dir, "bin") if isdir(join(pkg_dir, "bin")) else pkg_dir)
|
||||
if ("windows" not in systype and isdir(join(pkg_dir, "lib"))
|
||||
and type_ != "toolchain"):
|
||||
(
|
||||
os.path.join(pkg.path, "bin")
|
||||
if os.path.isdir(os.path.join(pkg.path, "bin"))
|
||||
else pkg.path
|
||||
),
|
||||
)
|
||||
if (
|
||||
not IS_WINDOWS
|
||||
and os.path.isdir(os.path.join(pkg.path, "lib"))
|
||||
and type_ != "toolchain"
|
||||
):
|
||||
env.PrependENVPath(
|
||||
"DYLD_LIBRARY_PATH"
|
||||
if "darwin" in systype else "LD_LIBRARY_PATH",
|
||||
join(pkg_dir, "lib"))
|
||||
"DYLD_LIBRARY_PATH" if IS_MACOS else "LD_LIBRARY_PATH",
|
||||
os.path.join(pkg.path, "lib"),
|
||||
)
|
||||
|
||||
# Platform specific LD Scripts
|
||||
if isdir(join(p.get_dir(), "ldscripts")):
|
||||
env.Prepend(LIBPATH=[join(p.get_dir(), "ldscripts")])
|
||||
if os.path.isdir(os.path.join(p.get_dir(), "ldscripts")):
|
||||
env.Prepend(LIBPATH=[os.path.join(p.get_dir(), "ldscripts")])
|
||||
|
||||
if "BOARD" not in env:
|
||||
# handle _MCU and _F_CPU variables for AVR native
|
||||
for key, value in variables.UnknownVariables().items():
|
||||
if not key.startswith("BOARD_"):
|
||||
continue
|
||||
env.Replace(**{
|
||||
key.upper().replace("BUILD.", ""):
|
||||
base64.b64decode(value)
|
||||
})
|
||||
return
|
||||
|
||||
# update board manifest with a custom data
|
||||
# update board manifest with overridden data from INI config
|
||||
board_config = env.BoardConfig()
|
||||
for key, value in variables.UnknownVariables().items():
|
||||
if not key.startswith("BOARD_"):
|
||||
for option, value in env.GetProjectOptions():
|
||||
if not option.startswith("board_"):
|
||||
continue
|
||||
board_config.update(key.lower()[6:], base64.b64decode(value))
|
||||
option = option.lower()[6:]
|
||||
try:
|
||||
if isinstance(board_config.get(option), bool):
|
||||
value = str(value).lower() in ("1", "yes", "true")
|
||||
elif isinstance(board_config.get(option), int):
|
||||
value = int(value)
|
||||
except KeyError:
|
||||
pass
|
||||
board_config.update(option, value)
|
||||
|
||||
# update default environment variables
|
||||
for key in variables.keys():
|
||||
if key in env or \
|
||||
not any([key.startswith("BOARD_"), key.startswith("UPLOAD_")]):
|
||||
# load default variables from board config
|
||||
for option_meta in ProjectOptions.values():
|
||||
if not option_meta.buildenvvar or option_meta.buildenvvar in env:
|
||||
continue
|
||||
_opt, _val = key.lower().split("_", 1)
|
||||
if _opt == "board":
|
||||
_opt = "build"
|
||||
if _val in board_config.get(_opt):
|
||||
env.Replace(**{key: board_config.get("%s.%s" % (_opt, _val))})
|
||||
data_path = (
|
||||
option_meta.name[6:]
|
||||
if option_meta.name.startswith("board_")
|
||||
else option_meta.name.replace("_", ".")
|
||||
)
|
||||
try:
|
||||
env[option_meta.buildenvvar] = board_config.get(data_path)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
if "build.ldscript" in board_config:
|
||||
env.Replace(LDSCRIPT_PATH=board_config.get("build.ldscript"))
|
||||
|
||||
|
||||
def PrintConfiguration(env):
|
||||
def PrintConfiguration(env): # pylint: disable=too-many-statements
|
||||
platform = env.PioPlatform()
|
||||
platform_data = ["PLATFORM: %s >" % platform.title]
|
||||
hardware_data = ["HARDWARE:"]
|
||||
configuration_data = ["CONFIGURATION:"]
|
||||
mcu = env.subst("$BOARD_MCU")
|
||||
f_cpu = env.subst("$BOARD_F_CPU")
|
||||
if mcu:
|
||||
hardware_data.append(mcu.upper())
|
||||
if f_cpu:
|
||||
f_cpu = int("".join([c for c in str(f_cpu) if c.isdigit()]))
|
||||
hardware_data.append("%dMHz" % (f_cpu / 1000000))
|
||||
pkg_metadata = PackageItem(platform.get_dir()).metadata
|
||||
board_config = env.BoardConfig() if "BOARD" in env else None
|
||||
|
||||
debug_tools = None
|
||||
if "BOARD" in env:
|
||||
board_config = env.BoardConfig()
|
||||
platform_data.append(board_config.get("name"))
|
||||
def _get_configuration_data():
|
||||
return (
|
||||
None
|
||||
if not board_config
|
||||
else [
|
||||
"CONFIGURATION:",
|
||||
"https://docs.platformio.org/page/boards/%s/%s.html"
|
||||
% (platform.name, board_config.id),
|
||||
]
|
||||
)
|
||||
|
||||
debug_tools = board_config.get("debug", {}).get("tools")
|
||||
def _get_plaform_data():
|
||||
data = [
|
||||
"PLATFORM: %s (%s)"
|
||||
% (
|
||||
platform.title,
|
||||
pkg_metadata.version if pkg_metadata else platform.version,
|
||||
)
|
||||
]
|
||||
if (
|
||||
int(ARGUMENTS.get("PIOVERBOSE", 0))
|
||||
and pkg_metadata
|
||||
and pkg_metadata.spec.external
|
||||
):
|
||||
data.append("(%s)" % pkg_metadata.spec.uri)
|
||||
if board_config:
|
||||
data.extend([">", board_config.get("name")])
|
||||
return data
|
||||
|
||||
def _get_hardware_data():
|
||||
data = ["HARDWARE:"]
|
||||
mcu = env.subst("$BOARD_MCU")
|
||||
f_cpu = env.subst("$BOARD_F_CPU")
|
||||
if mcu:
|
||||
data.append(mcu.upper())
|
||||
if f_cpu:
|
||||
f_cpu = int("".join([c for c in str(f_cpu) if c.isdigit()]))
|
||||
data.append("%dMHz," % (f_cpu / 1000000))
|
||||
if not board_config:
|
||||
return data
|
||||
ram = board_config.get("upload", {}).get("maximum_ram_size")
|
||||
flash = board_config.get("upload", {}).get("maximum_size")
|
||||
hardware_data.append(
|
||||
"%s RAM (%s Flash)" % (util.format_filesize(ram),
|
||||
util.format_filesize(flash)))
|
||||
configuration_data.append(
|
||||
"https://docs.platformio.org/page/boards/%s/%s.html" %
|
||||
(platform.name, board_config.id))
|
||||
data.append(
|
||||
"%s RAM, %s Flash"
|
||||
% (fs.humanize_file_size(ram), fs.humanize_file_size(flash))
|
||||
)
|
||||
return data
|
||||
|
||||
for data in (configuration_data, platform_data, hardware_data):
|
||||
if len(data) > 1:
|
||||
def _get_debug_data():
|
||||
debug_tools = (
|
||||
board_config.get("debug", {}).get("tools") if board_config else None
|
||||
)
|
||||
if not debug_tools:
|
||||
return None
|
||||
data = [
|
||||
"DEBUG:",
|
||||
"Current",
|
||||
"(%s)"
|
||||
% board_config.get_debug_tool_name(env.GetProjectOption("debug_tool")),
|
||||
]
|
||||
onboard = []
|
||||
external = []
|
||||
for key, value in debug_tools.items():
|
||||
if value.get("onboard"):
|
||||
onboard.append(key)
|
||||
else:
|
||||
external.append(key)
|
||||
if onboard:
|
||||
data.extend(["On-board", "(%s)" % ", ".join(sorted(onboard))])
|
||||
if external:
|
||||
data.extend(["External", "(%s)" % ", ".join(sorted(external))])
|
||||
return data
|
||||
|
||||
def _get_packages_data():
|
||||
data = []
|
||||
for item in platform.dump_used_packages():
|
||||
original_version = get_original_version(item["version"])
|
||||
info = "%s @ %s" % (item["name"], item["version"])
|
||||
extra = []
|
||||
if original_version:
|
||||
extra.append(original_version)
|
||||
if "src_url" in item and int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
extra.append(item["src_url"])
|
||||
if extra:
|
||||
info += " (%s)" % ", ".join(extra)
|
||||
data.append(info)
|
||||
if not data:
|
||||
return None
|
||||
return ["PACKAGES:"] + ["\n - %s" % d for d in sorted(data)]
|
||||
|
||||
for data in (
|
||||
_get_configuration_data(),
|
||||
_get_plaform_data(),
|
||||
_get_hardware_data(),
|
||||
_get_debug_data(),
|
||||
_get_packages_data(),
|
||||
):
|
||||
if data and len(data) > 1:
|
||||
print(" ".join(data))
|
||||
|
||||
# Debugging
|
||||
if not debug_tools:
|
||||
return
|
||||
|
||||
data = [
|
||||
"CURRENT(%s)" % board_config.get_debug_tool_name(
|
||||
env.subst("$DEBUG_TOOL"))
|
||||
]
|
||||
onboard = []
|
||||
external = []
|
||||
for key, value in debug_tools.items():
|
||||
if value.get("onboard"):
|
||||
onboard.append(key)
|
||||
else:
|
||||
external.append(key)
|
||||
if onboard:
|
||||
data.append("ON-BOARD(%s)" % ", ".join(sorted(onboard)))
|
||||
if external:
|
||||
data.append("EXTERNAL(%s)" % ", ".join(sorted(external)))
|
||||
|
||||
print("DEBUG: %s" % " ".join(data))
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
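LoadPioPlatform above coerces board_* overrides from platformio.ini to the type already stored in the board manifest (bool is checked before int, since bool is an int subclass). A minimal standalone sketch with a hypothetical helper name:

def coerce_override(current_value, raw_value):
    if isinstance(current_value, bool):
        return str(raw_value).lower() in ("1", "yes", "true")
    if isinstance(current_value, int):
        return int(raw_value)
    return raw_value

print(coerce_override(True, "no"))           # -> False
print(coerce_override(16000000, "8000000"))  # -> 8000000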
platformio/builder/tools/pioproject.py (new file, 54 lines)
@@ -0,0 +1,54 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from platformio.compat import MISSING
from platformio.project.config import ProjectConfig


def GetProjectConfig(env):
    return ProjectConfig.get_instance(env["PROJECT_CONFIG"])


def GetProjectOptions(env, as_dict=False):
    return env.GetProjectConfig().items(env=env["PIOENV"], as_dict=as_dict)


def GetProjectOption(env, option, default=MISSING):
    return env.GetProjectConfig().get("env:" + env["PIOENV"], option, default)


def LoadProjectOptions(env):
    config = env.GetProjectConfig()
    section = "env:" + env["PIOENV"]
    for option in config.options(section):
        option_meta = config.find_option_meta(section, option)
        if (
            not option_meta
            or not option_meta.buildenvvar
            or option_meta.buildenvvar in env
        ):
            continue
        env[option_meta.buildenvvar] = config.get(section, option)


def exists(_):
    return True


def generate(env):
    env.AddMethod(GetProjectConfig)
    env.AddMethod(GetProjectOptions)
    env.AddMethod(GetProjectOption)
    env.AddMethod(LoadProjectOptions)
    return env
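GetProjectOption above is the usual way for extra scripts to read values from the active [env:...] section; a short sketch with an illustrative option and fallback:

Import("env")

upload_port = env.GetProjectOption("upload_port", default=None)
print("Configured upload port:", upload_port)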
platformio/builder/tools/piosize.py (new file, 266 lines)
@@ -0,0 +1,266 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
|
||||
import json
|
||||
import sys
|
||||
from os import environ, makedirs, remove
|
||||
from os.path import isdir, join, splitdrive
|
||||
|
||||
from elftools.elf.descriptions import describe_sh_flags
|
||||
from elftools.elf.elffile import ELFFile
|
||||
|
||||
from platformio.compat import IS_WINDOWS
|
||||
from platformio.proc import exec_command
|
||||
|
||||
|
||||
def _run_tool(cmd, env, tool_args):
|
||||
sysenv = environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
tmp_file = join(build_dir, "size-data-longcmd.txt")
|
||||
|
||||
with open(tmp_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write("\n".join(tool_args))
|
||||
|
||||
cmd.append("@" + tmp_file)
|
||||
result = exec_command(cmd, env=sysenv)
|
||||
remove(tmp_file)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _get_symbol_locations(env, elf_path, addrs):
|
||||
if not addrs:
|
||||
return {}
|
||||
cmd = [env.subst("$CC").replace("-gcc", "-addr2line"), "-e", elf_path]
|
||||
    result = _run_tool(cmd, env, addrs)
    locations = [line for line in result["out"].split("\n") if line]
    assert len(addrs) == len(locations)

    return dict(zip(addrs, [loc.strip() for loc in locations]))


def _get_demangled_names(env, mangled_names):
    if not mangled_names:
        return {}
    result = _run_tool(
        [env.subst("$CC").replace("-gcc", "-c++filt")], env, mangled_names
    )
    demangled_names = [line for line in result["out"].split("\n") if line]
    assert len(mangled_names) == len(demangled_names)

    return dict(
        zip(
            mangled_names,
            [dn.strip().replace("::__FUNCTION__", "") for dn in demangled_names],
        )
    )


def _collect_sections_info(env, elffile):
    sections = {}
    for section in elffile.iter_sections():
        if section.is_null() or section.name.startswith(".debug"):
            continue

        section_type = section["sh_type"]
        section_flags = describe_sh_flags(section["sh_flags"])
        section_size = section.data_size

        section_data = {
            "name": section.name,
            "size": section_size,
            "start_addr": section["sh_addr"],
            "type": section_type,
            "flags": section_flags,
        }

        sections[section.name] = section_data
        sections[section.name]["in_flash"] = env.pioSizeIsFlashSection(section_data)
        sections[section.name]["in_ram"] = env.pioSizeIsRamSection(section_data)

    return sections


def _collect_symbols_info(env, elffile, elf_path, sections):
    symbols = []

    symbol_section = elffile.get_section_by_name(".symtab")
    if symbol_section.is_null():
        sys.stderr.write("Couldn't find symbol table. Is ELF file stripped?")
        env.Exit(1)

    sysenv = environ.copy()
    sysenv["PATH"] = str(env["ENV"]["PATH"])

    symbol_addrs = []
    mangled_names = []
    for s in symbol_section.iter_symbols():
        symbol_info = s.entry["st_info"]
        symbol_addr = s["st_value"]
        symbol_size = s["st_size"]
        symbol_type = symbol_info["type"]

        if not env.pioSizeIsValidSymbol(s.name, symbol_type, symbol_addr):
            continue

        symbol = {
            "addr": symbol_addr,
            "bind": symbol_info["bind"],
            "name": s.name,
            "type": symbol_type,
            "size": symbol_size,
            "section": env.pioSizeDetermineSection(sections, symbol_addr),
        }

        if s.name.startswith("_Z"):
            mangled_names.append(s.name)

        symbol_addrs.append(hex(symbol_addr))
        symbols.append(symbol)

    symbol_locations = _get_symbol_locations(env, elf_path, symbol_addrs)
    demangled_names = _get_demangled_names(env, mangled_names)
    for symbol in symbols:
        if symbol["name"].startswith("_Z"):
            symbol["demangled_name"] = demangled_names.get(symbol["name"])
        location = symbol_locations.get(hex(symbol["addr"]))
        if not location or "?" in location:
            continue
        if IS_WINDOWS:
            drive, tail = splitdrive(location)
            location = join(drive.upper(), tail)
        symbol["file"] = location
        symbol["line"] = 0
        if ":" in location:
            file_, line = location.rsplit(":", 1)
            if line.isdigit():
                symbol["file"] = file_
                symbol["line"] = int(line)
    return symbols


def pioSizeDetermineSection(_, sections, symbol_addr):
    for section, info in sections.items():
        if not info.get("in_flash", False) and not info.get("in_ram", False):
            continue
        if symbol_addr in range(info["start_addr"], info["start_addr"] + info["size"]):
            return section
    return "unknown"


def pioSizeIsValidSymbol(_, symbol_name, symbol_type, symbol_address):
    return symbol_name and symbol_address != 0 and symbol_type != "STT_NOTYPE"


def pioSizeIsRamSection(_, section):
    return (
        section.get("type", "") in ("SHT_NOBITS", "SHT_PROGBITS")
        and section.get("flags", "") == "WA"
    )


def pioSizeIsFlashSection(_, section):
    return section.get("type", "") == "SHT_PROGBITS" and "A" in section.get("flags", "")


def pioSizeCalculateFirmwareSize(_, sections):
    flash_size = ram_size = 0
    for section_info in sections.values():
        if section_info.get("in_flash", False):
            flash_size += section_info.get("size", 0)
        if section_info.get("in_ram", False):
            ram_size += section_info.get("size", 0)

    return ram_size, flash_size


def DumpSizeData(_, target, source, env):  # pylint: disable=unused-argument
    data = {"device": {}, "memory": {}, "version": 1}

    board = env.BoardConfig()
    if board:
        data["device"] = {
            "mcu": board.get("build.mcu", ""),
            "cpu": board.get("build.cpu", ""),
            "frequency": board.get("build.f_cpu"),
            "flash": int(board.get("upload.maximum_size", 0)),
            "ram": int(board.get("upload.maximum_ram_size", 0)),
        }
        if data["device"]["frequency"] and data["device"]["frequency"].endswith("L"):
            data["device"]["frequency"] = int(data["device"]["frequency"][0:-1])

    elf_path = env.subst("$PIOMAINPROG")

    with open(elf_path, "rb") as fp:
        elffile = ELFFile(fp)

        if not elffile.has_dwarf_info():
            sys.stderr.write("Elf file doesn't contain DWARF information")
            env.Exit(1)

        sections = _collect_sections_info(env, elffile)
        firmware_ram, firmware_flash = env.pioSizeCalculateFirmwareSize(sections)
        data["memory"]["total"] = {
            "ram_size": firmware_ram,
            "flash_size": firmware_flash,
            "sections": sections,
        }

        files = {}
        for symbol in _collect_symbols_info(env, elffile, elf_path, sections):
            file_path = symbol.get("file") or "unknown"
            if not files.get(file_path, {}):
                files[file_path] = {"symbols": [], "ram_size": 0, "flash_size": 0}

            symbol_size = symbol.get("size", 0)
            section = sections.get(symbol.get("section", ""), {})
            if not section:
                continue
            if section.get("in_ram", False):
                files[file_path]["ram_size"] += symbol_size
            if section.get("in_flash", False):
                files[file_path]["flash_size"] += symbol_size

            files[file_path]["symbols"].append(symbol)

        data["memory"]["files"] = []
        for k, v in files.items():
            file_data = {"path": k}
            file_data.update(v)
            data["memory"]["files"].append(file_data)

    with open(
        join(env.subst("$BUILD_DIR"), "sizedata.json"), mode="w", encoding="utf8"
    ) as fp:
        fp.write(json.dumps(data))


def exists(_):
    return True


def generate(env):
    env.AddMethod(pioSizeIsRamSection)
    env.AddMethod(pioSizeIsFlashSection)
    env.AddMethod(pioSizeCalculateFirmwareSize)
    env.AddMethod(pioSizeDetermineSection)
    env.AddMethod(pioSizeIsValidSymbol)
    env.AddMethod(DumpSizeData)
    return env
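Worked example (not part of the diff): the section helpers above decide whether an ELF section counts toward flash, RAM, or both. A minimal standalone sketch of the same rules, applied to a hypothetical section table with invented sizes:

# Illustrative re-statement of pioSizeIsRamSection / pioSizeIsFlashSection,
# applied to a toy section table (numbers invented).
def is_ram(section):
    return section["type"] in ("SHT_NOBITS", "SHT_PROGBITS") and section["flags"] == "WA"

def is_flash(section):
    return section["type"] == "SHT_PROGBITS" and "A" in section["flags"]

sections = {
    ".text": {"type": "SHT_PROGBITS", "flags": "AX", "size": 5120},
    ".data": {"type": "SHT_PROGBITS", "flags": "WA", "size": 256},
    ".bss":  {"type": "SHT_NOBITS",   "flags": "WA", "size": 1024},
}

flash = sum(s["size"] for s in sections.values() if is_flash(s))  # .text + .data = 5376
ram = sum(s["size"] for s in sections.values() if is_ram(s))      # .data + .bss = 1280
print(flash, ram)

Note that an initialized .data section is intentionally counted in both totals, exactly as pioSizeCalculateFirmwareSize does: it occupies flash for its load image and RAM at run time.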
platformio/builder/tools/piotarget.py (new file, 116 lines)
@@ -0,0 +1,116 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

from SCons.Action import Action  # pylint: disable=import-error
from SCons.Script import ARGUMENTS  # pylint: disable=import-error
from SCons.Script import AlwaysBuild  # pylint: disable=import-error

from platformio import compat, fs


def VerboseAction(_, act, actstr):
    if int(ARGUMENTS.get("PIOVERBOSE", 0)):
        return act
    return Action(act, actstr)


def IsCleanTarget(env):
    return env.GetOption("clean")


def CleanProject(env, fullclean=False):
    def _relpath(path):
        if compat.IS_WINDOWS:
            prefix = os.getcwd()[:2].lower()
            if (
                ":" not in prefix
                or not path.lower().startswith(prefix)
                or os.path.relpath(path).startswith("..")
            ):
                return path
        return os.path.relpath(path)

    def _clean_dir(path):
        clean_rel_path = _relpath(path)
        print(f"Removing {clean_rel_path}")
        fs.rmtree(path)

    build_dir = env.subst("$BUILD_DIR")
    libdeps_dir = env.subst(os.path.join("$PROJECT_LIBDEPS_DIR", "$PIOENV"))
    if os.path.isdir(build_dir):
        _clean_dir(build_dir)
    else:
        print("Build environment is clean")

    if fullclean and os.path.isdir(libdeps_dir):
        _clean_dir(libdeps_dir)

    print("Done cleaning")


def AddTarget(  # pylint: disable=too-many-arguments,too-many-positional-arguments
    env,
    name,
    dependencies,
    actions,
    title=None,
    description=None,
    group="General",
    always_build=True,
):
    if "__PIO_TARGETS" not in env:
        env["__PIO_TARGETS"] = {}
    assert name not in env["__PIO_TARGETS"]
    env["__PIO_TARGETS"][name] = dict(
        name=name, title=title, description=description, group=group
    )
    target = env.Alias(name, dependencies, actions)
    if always_build:
        AlwaysBuild(target)
    return target


def AddPlatformTarget(env, *args, **kwargs):
    return env.AddTarget(group="Platform", *args, **kwargs)


def AddCustomTarget(env, *args, **kwargs):
    return env.AddTarget(group="Custom", *args, **kwargs)


def DumpTargets(env):
    targets = env.get("__PIO_TARGETS") or {}
    # pre-fill default targets if embedded dev-platform
    if env.PioPlatform().is_embedded() and not any(
        t["group"] == "Platform" for t in targets.values()
    ):
        targets["upload"] = dict(name="upload", group="Platform", title="Upload")
    return list(targets.values())


def exists(_):
    return True


def generate(env):
    env.AddMethod(VerboseAction)
    env.AddMethod(IsCleanTarget)
    env.AddMethod(CleanProject)
    env.AddMethod(AddTarget)
    env.AddMethod(AddPlatformTarget)
    env.AddMethod(AddCustomTarget)
    env.AddMethod(DumpTargets)
    return env
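Usage sketch (not part of the diff): an `extra_scripts` entry can register its own target through the methods added above; `DumpTargets()` is what IDE integrations read back. Target name, action string, and titles below are invented for illustration.

# Hypothetical extra_script for a project using this tool module.
Import("env")  # provided by SCons/PlatformIO inside extra scripts

env.AddCustomTarget(
    name="erase_fs",
    dependencies=None,
    actions=["echo Erasing filesystem ..."],  # placeholder action
    title="Erase Filesystem",
    description="Example custom target",
)

for t in env.DumpTargets():
    print(t["group"], t["name"], t.get("title"))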
platformio/builder/tools/piotest.py (new file, 61 lines)
@@ -0,0 +1,61 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

from platformio.builder.tools import piobuild
from platformio.test.result import TestSuite
from platformio.test.runners.factory import TestRunnerFactory


def ConfigureTestTarget(env):
    env.Append(
        CPPDEFINES=["UNIT_TEST"],  # deprecated, use PIO_UNIT_TESTING
        PIOTEST_SRC_FILTER=[f"+<*.{ext}>" for ext in piobuild.SRC_BUILD_EXT],
    )
    env.Prepend(CPPPATH=["$PROJECT_TEST_DIR"])

    if "PIOTEST_RUNNING_NAME" in env:
        test_name = env["PIOTEST_RUNNING_NAME"]
        while True:
            test_name = os.path.dirname(test_name)  # parent dir
            # skip nested tests (user's side issue?)
            if not test_name or os.path.basename(test_name).startswith("test_"):
                break
            env.Prepend(
                PIOTEST_SRC_FILTER=[
                    f"+<{test_name}{os.path.sep}*.{ext}>"
                    for ext in piobuild.SRC_BUILD_EXT
                ],
                CPPPATH=[os.path.join("$PROJECT_TEST_DIR", test_name)],
            )

        env.Prepend(
            PIOTEST_SRC_FILTER=[f"+<$PIOTEST_RUNNING_NAME{os.path.sep}>"],
            CPPPATH=[os.path.join("$PROJECT_TEST_DIR", "$PIOTEST_RUNNING_NAME")],
        )

    test_runner = TestRunnerFactory.new(
        TestSuite(env["PIOENV"], env.get("PIOTEST_RUNNING_NAME", "*")),
        env.GetProjectConfig(),
    )
    test_runner.configure_build_env(env)


def generate(env):
    env.AddMethod(ConfigureTestTarget)


def exists(_):
    return True
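Worked example (not part of the diff): the loop above prepends source filters for every parent directory of the running test that is not itself a `test_*` folder, so shared helpers next to a test get compiled with it. A small standalone simulation with an invented test name:

# Simulates the PIOTEST_SRC_FILTER accumulation for a hypothetical test
# named "embedded/test_leds" (directory names invented).
import os

SRC_BUILD_EXT = ["c", "cpp"]  # trimmed; the real list also covers assembler sources

filters = [f"+<*.{ext}>" for ext in SRC_BUILD_EXT]  # defaults from env.Append()

test_name = "embedded/test_leds"
while True:
    test_name = os.path.dirname(test_name)
    if not test_name or os.path.basename(test_name).startswith("test_"):
        break
    filters = [f"+<{test_name}{os.path.sep}*.{ext}>" for ext in SRC_BUILD_EXT] + filters

filters = [f"+<embedded/test_leds{os.path.sep}>"] + filters  # the running test itself
print(filters)
# ['+<embedded/test_leds/>', '+<embedded/*.c>', '+<embedded/*.cpp>', '+<*.c>', '+<*.cpp>']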
@ -12,22 +12,21 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from fnmatch import fnmatch
|
||||
from os import environ
|
||||
from os.path import isfile, join
|
||||
from shutil import copyfile
|
||||
from time import sleep
|
||||
|
||||
from SCons.Script import ARGUMENTS
|
||||
from SCons.Script import ARGUMENTS # pylint: disable=import-error
|
||||
from serial import Serial, SerialException
|
||||
|
||||
from platformio import exception, util
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
from platformio import exception, fs
|
||||
from platformio.device.finder import SerialPortFinder, find_mbed_disk, is_pattern_port
|
||||
from platformio.device.list.util import list_serial_ports
|
||||
from platformio.proc import exec_command
|
||||
|
||||
|
||||
def FlushSerialBuffer(env, port):
|
||||
@ -58,9 +57,9 @@ def WaitForNewSerialPort(env, before):
|
||||
prev_port = env.subst("$UPLOAD_PORT")
|
||||
new_port = None
|
||||
elapsed = 0
|
||||
before = [p['port'] for p in before]
|
||||
before = [p["port"] for p in before]
|
||||
while elapsed < 5 and new_port is None:
|
||||
now = [p['port'] for p in util.get_serial_ports()]
|
||||
now = [p["port"] for p in list_serial_ports()]
|
||||
for p in now:
|
||||
if p not in before:
|
||||
new_port = p
|
||||
@ -82,10 +81,12 @@ def WaitForNewSerialPort(env, before):
|
||||
sleep(1)
|
||||
|
||||
if not new_port:
|
||||
sys.stderr.write("Error: Couldn't find a board on the selected port. "
|
||||
"Check that you have the correct port selected. "
|
||||
"If it is correct, try pressing the board's reset "
|
||||
"button after initiating the upload.\n")
|
||||
sys.stderr.write(
|
||||
"Error: Couldn't find a board on the selected port. "
|
||||
"Check that you have the correct port selected. "
|
||||
"If it is correct, try pressing the board's reset "
|
||||
"button after initiating the upload.\n"
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
return new_port
|
||||
@ -93,72 +94,29 @@ def WaitForNewSerialPort(env, before):
|
||||
|
||||
def AutodetectUploadPort(*args, **kwargs):
|
||||
env = args[0]
|
||||
|
||||
def _get_pattern():
|
||||
if "UPLOAD_PORT" not in env:
|
||||
return None
|
||||
if set(["*", "?", "[", "]"]) & set(env['UPLOAD_PORT']):
|
||||
return env['UPLOAD_PORT']
|
||||
return None
|
||||
|
||||
def _is_match_pattern(port):
|
||||
pattern = _get_pattern()
|
||||
if not pattern:
|
||||
return True
|
||||
return fnmatch(port, pattern)
|
||||
|
||||
def _look_for_mbed_disk():
|
||||
msdlabels = ("mbed", "nucleo", "frdm", "microbit")
|
||||
for item in util.get_logical_devices():
|
||||
if item['path'].startswith("/net") or not _is_match_pattern(
|
||||
item['path']):
|
||||
continue
|
||||
mbed_pages = [
|
||||
join(item['path'], n) for n in ("mbed.htm", "mbed.html")
|
||||
]
|
||||
if any(isfile(p) for p in mbed_pages):
|
||||
return item['path']
|
||||
if item['name'] \
|
||||
and any(l in item['name'].lower() for l in msdlabels):
|
||||
return item['path']
|
||||
return None
|
||||
|
||||
def _look_for_serial_port():
|
||||
port = None
|
||||
board_hwids = []
|
||||
upload_protocol = env.subst("$UPLOAD_PROTOCOL")
|
||||
if "BOARD" in env and "build.hwids" in env.BoardConfig():
|
||||
board_hwids = env.BoardConfig().get("build.hwids")
|
||||
for item in util.get_serial_ports(filter_hwid=True):
|
||||
if not _is_match_pattern(item['port']):
|
||||
continue
|
||||
port = item['port']
|
||||
if upload_protocol.startswith("blackmagic"):
|
||||
if "windows" in util.get_systype() and \
|
||||
port.startswith("COM") and len(port) > 4:
|
||||
port = "\\\\.\\%s" % port
|
||||
if "GDB" in item['description']:
|
||||
return port
|
||||
for hwid in board_hwids:
|
||||
hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
|
||||
if hwid_str in item['hwid']:
|
||||
return port
|
||||
return port
|
||||
|
||||
if "UPLOAD_PORT" in env and not _get_pattern():
|
||||
print(env.subst("Use manually specified: $UPLOAD_PORT"))
|
||||
initial_port = env.subst("$UPLOAD_PORT")
|
||||
upload_protocol = env.subst("$UPLOAD_PROTOCOL")
|
||||
if initial_port and not is_pattern_port(initial_port):
|
||||
print(env.subst("Using manually specified: $UPLOAD_PORT"))
|
||||
return
|
||||
|
||||
if (env.subst("$UPLOAD_PROTOCOL") == "mbed"
|
||||
or ("mbed" in env.subst("$PIOFRAMEWORK")
|
||||
and not env.subst("$UPLOAD_PROTOCOL"))):
|
||||
env.Replace(UPLOAD_PORT=_look_for_mbed_disk())
|
||||
if upload_protocol == "mbed" or (
|
||||
"mbed" in env.subst("$PIOFRAMEWORK") and not upload_protocol
|
||||
):
|
||||
env.Replace(UPLOAD_PORT=find_mbed_disk(initial_port))
|
||||
else:
|
||||
try:
|
||||
util.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as e:
|
||||
sys.stderr.write("\n%s\n\n" % e)
|
||||
env.Replace(UPLOAD_PORT=_look_for_serial_port())
|
||||
fs.ensure_udev_rules()
|
||||
except exception.InvalidUdevRules as exc:
|
||||
sys.stderr.write("\n%s\n\n" % exc)
|
||||
env.Replace(
|
||||
UPLOAD_PORT=SerialPortFinder(
|
||||
board_config=env.BoardConfig() if "BOARD" in env else None,
|
||||
upload_protocol=upload_protocol,
|
||||
prefer_gdb_port="blackmagic" in upload_protocol,
|
||||
verbose=int(ARGUMENTS.get("PIOVERBOSE", 0)),
|
||||
).find(initial_port)
|
||||
)
|
||||
|
||||
if env.subst("$UPLOAD_PORT"):
|
||||
print(env.subst("Auto-detected: $UPLOAD_PORT"))
|
||||
@ -167,7 +125,8 @@ def AutodetectUploadPort(*args, **kwargs):
|
||||
"Error: Please specify `upload_port` for environment or use "
|
||||
"global `--upload-port` option.\n"
|
||||
"For some development platforms it can be a USB flash "
|
||||
"drive (i.e. /media/<user>/<device name>)\n")
|
||||
"drive (i.e. /media/<user>/<device name>)\n"
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
@ -175,19 +134,22 @@ def UploadToDisk(_, target, source, env):
|
||||
assert "UPLOAD_PORT" in env
|
||||
progname = env.subst("$PROGNAME")
|
||||
for ext in ("bin", "hex"):
|
||||
fpath = join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
|
||||
if not isfile(fpath):
|
||||
fpath = os.path.join(env.subst("$BUILD_DIR"), "%s.%s" % (progname, ext))
|
||||
if not os.path.isfile(fpath):
|
||||
continue
|
||||
copyfile(fpath,
|
||||
join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext)))
|
||||
print("Firmware has been successfully uploaded.\n"
|
||||
"(Some boards may require manual hard reset)")
|
||||
copyfile(
|
||||
fpath, os.path.join(env.subst("$UPLOAD_PORT"), "%s.%s" % (progname, ext))
|
||||
)
|
||||
print(
|
||||
"Firmware has been successfully uploaded.\n"
|
||||
"(Some boards may require manual hard reset)"
|
||||
)
|
||||
|
||||
|
||||
def CheckUploadSize(_, target, source, env):
|
||||
check_conditions = [
|
||||
env.get("BOARD"),
|
||||
env.get("SIZETOOL") or env.get("SIZECHECKCMD")
|
||||
env.get("SIZETOOL") or env.get("SIZECHECKCMD"),
|
||||
]
|
||||
if not all(check_conditions):
|
||||
return
|
||||
@ -200,7 +162,8 @@ def CheckUploadSize(_, target, source, env):
|
||||
env.Replace(
|
||||
SIZECHECKCMD="$SIZETOOL -B -d $SOURCES",
|
||||
SIZEPROGREGEXP=r"^(\d+)\s+(\d+)\s+\d+\s",
|
||||
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+")
|
||||
SIZEDATAREGEXP=r"^\d+\s+(\d+)\s+(\d+)\s+\d+",
|
||||
)
|
||||
|
||||
def _get_size_output():
|
||||
cmd = env.get("SIZECHECKCMD")
|
||||
@ -209,12 +172,12 @@ def CheckUploadSize(_, target, source, env):
|
||||
if not isinstance(cmd, list):
|
||||
cmd = cmd.split()
|
||||
cmd = [arg.replace("$SOURCES", str(source[0])) for arg in cmd if arg]
|
||||
sysenv = environ.copy()
|
||||
sysenv['PATH'] = str(env['ENV']['PATH'])
|
||||
result = util.exec_command(env.subst(cmd), env=sysenv)
|
||||
if result['returncode'] != 0:
|
||||
sysenv = os.environ.copy()
|
||||
sysenv["PATH"] = str(env["ENV"]["PATH"])
|
||||
result = exec_command(env.subst(cmd), env=sysenv)
|
||||
if result["returncode"] != 0:
|
||||
return None
|
||||
return result['out'].strip()
|
||||
return result["out"].strip()
|
||||
|
||||
def _calculate_size(output, pattern):
|
||||
if not output or not pattern:
|
||||
@ -234,11 +197,12 @@ def CheckUploadSize(_, target, source, env):
|
||||
def _format_availale_bytes(value, total):
|
||||
percent_raw = float(value) / float(total)
|
||||
blocks_per_progress = 10
|
||||
used_blocks = int(round(blocks_per_progress * percent_raw))
|
||||
if used_blocks > blocks_per_progress:
|
||||
used_blocks = blocks_per_progress
|
||||
used_blocks = min(
|
||||
int(round(blocks_per_progress * percent_raw)), blocks_per_progress
|
||||
)
|
||||
return "[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
|
||||
"=" * used_blocks, blocks_per_progress, percent_raw, value, total)
|
||||
"=" * used_blocks, blocks_per_progress, percent_raw, value, total
|
||||
)
|
||||
|
||||
if not env.get("SIZECHECKCMD") and not env.get("SIZEPROGREGEXP"):
|
||||
_configure_defaults()
|
||||
@ -246,25 +210,24 @@ def CheckUploadSize(_, target, source, env):
|
||||
program_size = _calculate_size(output, env.get("SIZEPROGREGEXP"))
|
||||
data_size = _calculate_size(output, env.get("SIZEDATAREGEXP"))
|
||||
|
||||
print("Memory Usage -> http://bit.ly/pio-memory-usage")
|
||||
print('Advanced Memory Usage is available via "PlatformIO Home > Project Inspect"')
|
||||
if data_max_size and data_size > -1:
|
||||
print("DATA: %s" % _format_availale_bytes(data_size, data_max_size))
|
||||
print("RAM: %s" % _format_availale_bytes(data_size, data_max_size))
|
||||
if program_size > -1:
|
||||
print("PROGRAM: %s" % _format_availale_bytes(program_size,
|
||||
program_max_size))
|
||||
print("Flash: %s" % _format_availale_bytes(program_size, program_max_size))
|
||||
if int(ARGUMENTS.get("PIOVERBOSE", 0)):
|
||||
print(output)
|
||||
|
||||
# raise error
|
||||
# if data_max_size and data_size > data_max_size:
|
||||
# sys.stderr.write(
|
||||
# "Error: The data size (%d bytes) is greater "
|
||||
# "than maximum allowed (%s bytes)\n" % (data_size, data_max_size))
|
||||
# env.Exit(1)
|
||||
if data_max_size and data_size > data_max_size:
|
||||
sys.stderr.write(
|
||||
"Warning! The data size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (data_size, data_max_size)
|
||||
)
|
||||
if program_size > program_max_size:
|
||||
sys.stderr.write("Error: The program size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" %
|
||||
(program_size, program_max_size))
|
||||
sys.stderr.write(
|
||||
"Error: The program size (%d bytes) is greater "
|
||||
"than maximum allowed (%s bytes)\n" % (program_size, program_max_size)
|
||||
)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
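Worked example (not part of the diff): the usage bar printed by CheckUploadSize above maps the used/available ratio onto a ten-block gauge. With invented numbers, 52,064 bytes used out of 131,072 bytes of flash:

# Same arithmetic and format string as _format_availale_bytes(), toy numbers.
value, total = 52064, 131072
blocks_per_progress = 10
percent_raw = float(value) / float(total)  # ~0.397
used_blocks = min(int(round(blocks_per_progress * percent_raw)), blocks_per_progress)  # 4
print("[{:{}}] {: 6.1%} (used {:d} bytes from {:d} bytes)".format(
    "=" * used_blocks, blocks_per_progress, percent_raw, value, total))
# [====      ]  39.7% (used 52064 bytes from 131072 bytes)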
@ -272,8 +235,7 @@ def PrintUploadInfo(env):
|
||||
configured = env.subst("$UPLOAD_PROTOCOL")
|
||||
available = [configured] if configured else []
|
||||
if "BOARD" in env:
|
||||
available.extend(env.BoardConfig().get("upload", {}).get(
|
||||
"protocols", []))
|
||||
available.extend(env.BoardConfig().get("upload", {}).get("protocols", []))
|
||||
if available:
|
||||
print("AVAILABLE: %s" % ", ".join(sorted(set(available))))
|
||||
if configured:
|
||||
|
@ -1,88 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from hashlib import md5
|
||||
from os import makedirs
|
||||
from os.path import isdir, isfile, join
|
||||
from platform import system
|
||||
|
||||
# Windows CLI has limit with command length to 8192
|
||||
# Leave 2000 chars for flags and other options
|
||||
MAX_SOURCES_LENGTH = 6000
|
||||
|
||||
|
||||
def long_sources_hook(env, sources):
|
||||
_sources = str(sources).replace("\\", "/")
|
||||
if len(str(_sources)) < MAX_SOURCES_LENGTH:
|
||||
return sources
|
||||
|
||||
# fix space in paths
|
||||
data = []
|
||||
for line in _sources.split(".o "):
|
||||
line = line.strip()
|
||||
if not line.endswith(".o"):
|
||||
line += ".o"
|
||||
data.append('"%s"' % line)
|
||||
|
||||
return '@"%s"' % _file_long_data(env, " ".join(data))
|
||||
|
||||
|
||||
def long_incflags_hook(env, incflags):
|
||||
_incflags = env.subst(incflags).replace("\\", "/")
|
||||
if len(_incflags) < MAX_SOURCES_LENGTH:
|
||||
return incflags
|
||||
|
||||
# fix space in paths
|
||||
data = []
|
||||
for line in _incflags.split(" -I"):
|
||||
line = line.strip()
|
||||
if not line.startswith("-I"):
|
||||
line = "-I" + line
|
||||
data.append('-I"%s"' % line[2:])
|
||||
|
||||
return '@"%s"' % _file_long_data(env, " ".join(data))
|
||||
|
||||
|
||||
def _file_long_data(env, data):
|
||||
build_dir = env.subst("$BUILD_DIR")
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
tmp_file = join(build_dir, "longcmd-%s" % md5(data).hexdigest())
|
||||
if isfile(tmp_file):
|
||||
return tmp_file
|
||||
with open(tmp_file, "w") as fp:
|
||||
fp.write(data)
|
||||
return tmp_file
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
if system() != "Windows":
|
||||
return None
|
||||
|
||||
env.Replace(_long_sources_hook=long_sources_hook)
|
||||
env.Replace(_long_incflags_hook=long_incflags_hook)
|
||||
coms = {}
|
||||
for key in ("ARCOM", "LINKCOM"):
|
||||
coms[key] = env.get(key, "").replace(
|
||||
"$SOURCES", "${_long_sources_hook(__env__, SOURCES)}")
|
||||
for key in ("_CCCOMCOM", "ASPPCOM"):
|
||||
coms[key] = env.get(key, "").replace(
|
||||
"$_CPPINCFLAGS", "${_long_incflags_hook(__env__, _CPPINCFLAGS)}")
|
||||
env.Replace(**coms)
|
||||
|
||||
return env
|
@ -1,354 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
import sys
|
||||
from glob import glob
|
||||
from os import sep, walk
|
||||
from os.path import basename, dirname, isdir, join, realpath
|
||||
|
||||
from SCons import Builder, Util
|
||||
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild,
|
||||
DefaultEnvironment, Export, SConscript)
|
||||
|
||||
from platformio.util import glob_escape, pioversion_to_intstr
|
||||
|
||||
SRC_HEADER_EXT = ["h", "hpp"]
|
||||
SRC_C_EXT = ["c", "cc", "cpp"]
|
||||
SRC_BUILD_EXT = SRC_C_EXT + ["S", "spp", "SPP", "sx", "s", "asm", "ASM"]
|
||||
SRC_FILTER_DEFAULT = ["+<*>", "-<.git%s>" % sep, "-<svn%s>" % sep]
|
||||
SRC_FILTER_PATTERNS_RE = re.compile(r"(\+|\-)<([^>]+)>")
|
||||
|
||||
|
||||
def scons_patched_match_splitext(path, suffixes=None):
|
||||
"""Patch SCons Builder, append $OBJSUFFIX to the end of each target"""
|
||||
tokens = Util.splitext(path)
|
||||
if suffixes and tokens[1] and tokens[1] in suffixes:
|
||||
return (path, tokens[1])
|
||||
return tokens
|
||||
|
||||
|
||||
def _build_project_deps(env):
|
||||
project_lib_builder = env.ConfigureProjectLibBuilder()
|
||||
|
||||
# prepend project libs to the beginning of list
|
||||
env.Prepend(LIBS=project_lib_builder.build())
|
||||
# prepend extra linker related options from libs
|
||||
env.PrependUnique(
|
||||
**{
|
||||
key: project_lib_builder.env.get(key)
|
||||
for key in ("LIBS", "LIBPATH", "LINKFLAGS")
|
||||
if project_lib_builder.env.get(key)
|
||||
})
|
||||
|
||||
projenv = env.Clone()
|
||||
|
||||
# CPPPATH from dependencies
|
||||
projenv.PrependUnique(CPPPATH=project_lib_builder.env.get("CPPPATH"))
|
||||
# extra build flags from `platformio.ini`
|
||||
projenv.ProcessFlags(env.get("SRC_BUILD_FLAGS"))
|
||||
|
||||
is_test = "__test" in COMMAND_LINE_TARGETS
|
||||
if is_test:
|
||||
projenv.BuildSources("$BUILDTEST_DIR", "$PROJECTTEST_DIR",
|
||||
"$PIOTEST_SRC_FILTER")
|
||||
if not is_test or env.get("TEST_BUILD_PROJECT_SRC") == "true":
|
||||
projenv.BuildSources("$BUILDSRC_DIR", "$PROJECTSRC_DIR",
|
||||
env.get("SRC_FILTER"))
|
||||
|
||||
if not env.get("PIOBUILDFILES") and not COMMAND_LINE_TARGETS:
|
||||
sys.stderr.write(
|
||||
"Error: Nothing to build. Please put your source code files "
|
||||
"to '%s' folder\n" % env.subst("$PROJECTSRC_DIR"))
|
||||
env.Exit(1)
|
||||
|
||||
Export("projenv")
|
||||
|
||||
|
||||
def BuildProgram(env):
|
||||
|
||||
def _append_pio_macros():
|
||||
env.AppendUnique(CPPDEFINES=[(
|
||||
"PLATFORMIO",
|
||||
int("{0:02d}{1:02d}{2:02d}".format(*pioversion_to_intstr())))])
|
||||
|
||||
_append_pio_macros()
|
||||
|
||||
env.PrintConfiguration()
|
||||
|
||||
# fix ASM handling under non case-sensitive OS
|
||||
if not Util.case_sensitive_suffixes(".s", ".S"):
|
||||
env.Replace(AS="$CC", ASCOM="$ASPPCOM")
|
||||
|
||||
if set(["__debug", "debug"]) & set(COMMAND_LINE_TARGETS):
|
||||
env.ProcessDebug()
|
||||
|
||||
# process extra flags from board
|
||||
if "BOARD" in env and "build.extra_flags" in env.BoardConfig():
|
||||
env.ProcessFlags(env.BoardConfig().get("build.extra_flags"))
|
||||
|
||||
# apply user flags
|
||||
env.ProcessFlags(env.get("BUILD_FLAGS"))
|
||||
|
||||
# process framework scripts
|
||||
env.BuildFrameworks(env.get("PIOFRAMEWORK"))
|
||||
|
||||
# restore PIO macros if it was deleted by framework
|
||||
_append_pio_macros()
|
||||
|
||||
# remove specified flags
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
|
||||
if "__test" in COMMAND_LINE_TARGETS:
|
||||
env.ProcessTest()
|
||||
|
||||
# build project with dependencies
|
||||
_build_project_deps(env)
|
||||
|
||||
# append into the beginning a main LD script
|
||||
if (env.get("LDSCRIPT_PATH")
|
||||
and not any("-Wl,-T" in f for f in env['LINKFLAGS'])):
|
||||
env.Prepend(LINKFLAGS=["-T", "$LDSCRIPT_PATH"])
|
||||
|
||||
# enable "cyclic reference" for linker
|
||||
if env.get("LIBS") and env.GetCompilerType() == "gcc":
|
||||
env.Prepend(_LIBFLAGS="-Wl,--start-group ")
|
||||
env.Append(_LIBFLAGS=" -Wl,--end-group")
|
||||
|
||||
program = env.Program(
|
||||
join("$BUILD_DIR", env.subst("$PROGNAME")), env['PIOBUILDFILES'])
|
||||
env.Replace(PIOMAINPROG=program)
|
||||
|
||||
AlwaysBuild(
|
||||
env.Alias(
|
||||
"checkprogsize", program,
|
||||
env.VerboseAction(env.CheckUploadSize,
|
||||
"Checking size $PIOMAINPROG")))
|
||||
|
||||
return program
|
||||
|
||||
|
||||
def ParseFlagsExtended(env, flags): # pylint: disable=too-many-branches
|
||||
if not isinstance(flags, list):
|
||||
flags = [flags]
|
||||
result = {}
|
||||
for raw in flags:
|
||||
for key, value in env.ParseFlags(str(raw)).items():
|
||||
if key not in result:
|
||||
result[key] = []
|
||||
result[key].extend(value)
|
||||
|
||||
cppdefines = []
|
||||
for item in result['CPPDEFINES']:
|
||||
if not Util.is_Sequence(item):
|
||||
cppdefines.append(item)
|
||||
continue
|
||||
name, value = item[:2]
|
||||
if '\"' in value:
|
||||
value = value.replace('\"', '\\\"')
|
||||
elif value.isdigit():
|
||||
value = int(value)
|
||||
elif value.replace(".", "", 1).isdigit():
|
||||
value = float(value)
|
||||
cppdefines.append((name, value))
|
||||
result['CPPDEFINES'] = cppdefines
|
||||
|
||||
# fix relative CPPPATH & LIBPATH
|
||||
for k in ("CPPPATH", "LIBPATH"):
|
||||
for i, p in enumerate(result.get(k, [])):
|
||||
if isdir(p):
|
||||
result[k][i] = realpath(p)
|
||||
|
||||
# fix relative path for "-include"
|
||||
for i, f in enumerate(result.get("CCFLAGS", [])):
|
||||
if isinstance(f, tuple) and f[0] == "-include":
|
||||
result['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def ProcessFlags(env, flags): # pylint: disable=too-many-branches
|
||||
if not flags:
|
||||
return
|
||||
env.Append(**env.ParseFlagsExtended(flags))
|
||||
|
||||
# Cancel any previous definition of name, either built in or
|
||||
# provided with a -U option // Issue #191
|
||||
undefines = [
|
||||
u for u in env.get("CCFLAGS", [])
|
||||
if isinstance(u, basestring) and u.startswith("-U")
|
||||
]
|
||||
if undefines:
|
||||
for undef in undefines:
|
||||
env['CCFLAGS'].remove(undef)
|
||||
env.Append(_CPPDEFFLAGS=" %s" % " ".join(undefines))
|
||||
|
||||
|
||||
def ProcessUnFlags(env, flags):
|
||||
if not flags:
|
||||
return
|
||||
parsed = env.ParseFlagsExtended(flags)
|
||||
|
||||
# get all flags and copy them to each "*FLAGS" variable
|
||||
all_flags = []
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
all_flags.extend(unflags)
|
||||
for key, unflags in parsed.items():
|
||||
if key.endswith("FLAGS"):
|
||||
parsed[key].extend(all_flags)
|
||||
|
||||
for key, unflags in parsed.items():
|
||||
for unflag in unflags:
|
||||
for current in env.get(key, []):
|
||||
conditions = [
|
||||
unflag == current,
|
||||
isinstance(current, (tuple, list))
|
||||
and unflag[0] == current[0]
|
||||
]
|
||||
if any(conditions):
|
||||
env[key].remove(current)
|
||||
|
||||
|
||||
def IsFileWithExt(env, file_, ext): # pylint: disable=W0613
|
||||
if basename(file_).startswith("."):
|
||||
return False
|
||||
for e in ext:
|
||||
if file_.endswith(".%s" % e):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def MatchSourceFiles(env, src_dir, src_filter=None):
|
||||
|
||||
def _append_build_item(items, item, src_dir):
|
||||
if env.IsFileWithExt(item, SRC_BUILD_EXT + SRC_HEADER_EXT):
|
||||
items.add(item.replace(src_dir + sep, ""))
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
src_filter = env.subst(src_filter) if src_filter else None
|
||||
src_filter = src_filter or SRC_FILTER_DEFAULT
|
||||
if isinstance(src_filter, (list, tuple)):
|
||||
src_filter = " ".join(src_filter)
|
||||
|
||||
matches = set()
|
||||
# correct fs directory separator
|
||||
src_filter = src_filter.replace("/", sep).replace("\\", sep)
|
||||
for (action, pattern) in SRC_FILTER_PATTERNS_RE.findall(src_filter):
|
||||
items = set()
|
||||
for item in glob(join(glob_escape(src_dir), pattern)):
|
||||
if isdir(item):
|
||||
for root, _, files in walk(item, followlinks=True):
|
||||
for f in files:
|
||||
_append_build_item(items, join(root, f), src_dir)
|
||||
else:
|
||||
_append_build_item(items, item, src_dir)
|
||||
if action == "+":
|
||||
matches |= items
|
||||
else:
|
||||
matches -= items
|
||||
return sorted(list(matches))
|
||||
|
||||
|
||||
def CollectBuildFiles(env,
|
||||
variant_dir,
|
||||
src_dir,
|
||||
src_filter=None,
|
||||
duplicate=False):
|
||||
sources = []
|
||||
variants = []
|
||||
|
||||
src_dir = env.subst(src_dir)
|
||||
if src_dir.endswith(sep):
|
||||
src_dir = src_dir[:-1]
|
||||
|
||||
for item in env.MatchSourceFiles(src_dir, src_filter):
|
||||
_reldir = dirname(item)
|
||||
_src_dir = join(src_dir, _reldir) if _reldir else src_dir
|
||||
_var_dir = join(variant_dir, _reldir) if _reldir else variant_dir
|
||||
|
||||
if _var_dir not in variants:
|
||||
variants.append(_var_dir)
|
||||
env.VariantDir(_var_dir, _src_dir, duplicate)
|
||||
|
||||
if env.IsFileWithExt(item, SRC_BUILD_EXT):
|
||||
sources.append(env.File(join(_var_dir, basename(item))))
|
||||
|
||||
return sources
|
||||
|
||||
|
||||
def BuildFrameworks(env, frameworks):
|
||||
if not frameworks:
|
||||
return
|
||||
|
||||
if "BOARD" not in env:
|
||||
sys.stderr.write("Please specify `board` in `platformio.ini` to use "
|
||||
"with '%s' framework\n" % ", ".join(frameworks))
|
||||
env.Exit(1)
|
||||
|
||||
board_frameworks = env.BoardConfig().get("frameworks", [])
|
||||
if frameworks == ["platformio"]:
|
||||
if board_frameworks:
|
||||
frameworks.insert(0, board_frameworks[0])
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: Please specify `board` in `platformio.ini`\n")
|
||||
env.Exit(1)
|
||||
|
||||
for f in frameworks:
|
||||
if f in ("arduino", "energia"):
|
||||
# Arduino IDE appends .o the end of filename
|
||||
Builder.match_splitext = scons_patched_match_splitext
|
||||
if "nobuild" not in COMMAND_LINE_TARGETS:
|
||||
env.ConvertInoToCpp()
|
||||
|
||||
if f in board_frameworks:
|
||||
SConscript(env.GetFrameworkScript(f), exports="env")
|
||||
else:
|
||||
sys.stderr.write(
|
||||
"Error: This board doesn't support %s framework!\n" % f)
|
||||
env.Exit(1)
|
||||
|
||||
|
||||
def BuildLibrary(env, variant_dir, src_dir, src_filter=None):
|
||||
env.ProcessUnFlags(env.get("BUILD_UNFLAGS"))
|
||||
return env.StaticLibrary(
|
||||
env.subst(variant_dir),
|
||||
env.CollectBuildFiles(variant_dir, src_dir, src_filter))
|
||||
|
||||
|
||||
def BuildSources(env, variant_dir, src_dir, src_filter=None):
|
||||
nodes = env.CollectBuildFiles(variant_dir, src_dir, src_filter)
|
||||
DefaultEnvironment().Append(
|
||||
PIOBUILDFILES=[env.Object(node) for node in nodes])
|
||||
|
||||
|
||||
def exists(_):
|
||||
return True
|
||||
|
||||
|
||||
def generate(env):
|
||||
env.AddMethod(BuildProgram)
|
||||
env.AddMethod(ParseFlagsExtended)
|
||||
env.AddMethod(ProcessFlags)
|
||||
env.AddMethod(ProcessUnFlags)
|
||||
env.AddMethod(IsFileWithExt)
|
||||
env.AddMethod(MatchSourceFiles)
|
||||
env.AddMethod(CollectBuildFiles)
|
||||
env.AddMethod(BuildFrameworks)
|
||||
env.AddMethod(BuildLibrary)
|
||||
env.AddMethod(BuildSources)
|
||||
return env
|
platformio/cache.py (new file, 165 lines)
@@ -0,0 +1,165 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import codecs
import hashlib
import os
from time import time

from platformio import app, fs
from platformio.compat import hashlib_encode_data
from platformio.package.lockfile import LockFile
from platformio.project.helpers import get_project_cache_dir


class ContentCache:
    def __init__(self, namespace=None):
        self.cache_dir = os.path.join(get_project_cache_dir(), namespace or "content")
        self._db_path = os.path.join(self.cache_dir, "db.data")
        self._lockfile = None
        if not os.path.isdir(self.cache_dir):
            os.makedirs(self.cache_dir)

    def __enter__(self):
        # cleanup obsolete items
        self.delete()
        return self

    def __exit__(self, type_, value, traceback):
        pass

    @staticmethod
    def key_from_args(*args):
        h = hashlib.sha1()
        for arg in args:
            if arg:
                h.update(hashlib_encode_data(arg))
        return h.hexdigest()

    def get_cache_path(self, key):
        assert "/" not in key and "\\" not in key
        key = str(key)
        assert len(key) > 3
        return os.path.join(self.cache_dir, key)

    def get(self, key):
        cache_path = self.get_cache_path(key)
        if not os.path.isfile(cache_path):
            return None
        with codecs.open(cache_path, "rb", encoding="utf8") as fp:
            return fp.read()

    def set(self, key, data, valid):
        if not app.get_setting("enable_cache"):
            return False
        cache_path = self.get_cache_path(key)
        if os.path.isfile(cache_path):
            self.delete(key)
        if not data:
            return False
        tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        assert valid.endswith(tuple(tdmap))
        expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))

        if not self._lock_dbindex():
            return False

        if not os.path.isdir(os.path.dirname(cache_path)):
            os.makedirs(os.path.dirname(cache_path))
        try:
            with codecs.open(cache_path, mode="wb", encoding="utf8") as fp:
                fp.write(data)
            with open(self._db_path, mode="a", encoding="utf8") as fp:
                fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
        except UnicodeError:
            if os.path.isfile(cache_path):
                try:
                    os.remove(cache_path)
                except OSError:
                    pass

        return self._unlock_dbindex()

    def delete(self, keys=None):
        """Keys=None, delete expired items"""
        if not os.path.isfile(self._db_path):
            return None
        if not keys:
            keys = []
        if not isinstance(keys, list):
            keys = [keys]
        paths_for_delete = [self.get_cache_path(k) for k in keys]
        found = False
        newlines = []
        with open(self._db_path, encoding="utf8") as fp:
            for line in fp.readlines():
                line = line.strip()
                if "=" not in line:
                    continue
                expire, fname = line.split("=")
                path = os.path.join(self.cache_dir, fname)
                try:
                    if (
                        time() < int(expire)
                        and os.path.isfile(path)
                        and path not in paths_for_delete
                    ):
                        newlines.append(line)
                        continue
                except ValueError:
                    pass
                found = True
                if os.path.isfile(path):
                    try:
                        os.remove(path)
                        if not os.listdir(os.path.dirname(path)):
                            fs.rmtree(os.path.dirname(path))
                    except OSError:
                        pass

        if found and self._lock_dbindex():
            with open(self._db_path, mode="w", encoding="utf8") as fp:
                fp.write("\n".join(newlines) + "\n")
            self._unlock_dbindex()

        return True

    def clean(self):
        if not os.path.isdir(self.cache_dir):
            return
        fs.rmtree(self.cache_dir)

    def _lock_dbindex(self):
        self._lockfile = LockFile(self.cache_dir)
        try:
            self._lockfile.acquire()
        except:  # pylint: disable=bare-except
            return False

        return True

    def _unlock_dbindex(self):
        if self._lockfile:
            self._lockfile.release()
        return True


#
# Helpers
#


def cleanup_content_cache(namespace=None):
    with ContentCache(namespace) as cc:
        cc.clean()
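Usage sketch (not part of the diff): entering the context manager prunes expired entries, `key_from_args()` derives a stable key, and `set()` stores data with a textual TTL. Namespace, key inputs, and payload below are invented; `set()` is a no-op unless the `enable_cache` setting is on.

# Hypothetical caller of ContentCache.
from platformio.cache import ContentCache, cleanup_content_cache

with ContentCache("http") as cache:
    key = cache.key_from_args("GET", "https://api.registry.platformio.org/v3/search")
    cached = cache.get(key)
    if cached is None:
        cached = '{"items": []}'            # pretend this came from the network
        cache.set(key, cached, valid="1h")  # "s"/"m"/"h"/"d" suffixes are accepted

cleanup_content_cache("http")  # wipe the whole namespace when needed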
platformio/check/__init__.py (new file, 13 lines)
@@ -0,0 +1,13 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
platformio/check/cli.py (new file, 348 lines)
@@ -0,0 +1,348 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches
|
||||
# pylint: disable=redefined-builtin,too-many-statements
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
from collections import Counter
|
||||
from time import time
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import app, exception, fs, util
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools import CheckToolFactory
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import find_project_dir_above, get_project_dir
|
||||
|
||||
|
||||
@click.command("check", short_help="Static Code Analysis")
|
||||
@click.option("-e", "--environment", multiple=True)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=os.getcwd,
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option(
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("--pattern", multiple=True, hidden=True)
|
||||
@click.option("-f", "--src-filters", multiple=True)
|
||||
@click.option("--flags", multiple=True)
|
||||
@click.option(
|
||||
"--severity", multiple=True, type=click.Choice(DefectItem.SEVERITY_LABELS.values())
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.option(
|
||||
"--fail-on-defect",
|
||||
multiple=True,
|
||||
type=click.Choice(DefectItem.SEVERITY_LABELS.values()),
|
||||
)
|
||||
@click.option("--skip-packages", is_flag=True)
|
||||
def cli( # pylint: disable=too-many-positional-arguments
|
||||
environment,
|
||||
project_dir,
|
||||
project_conf,
|
||||
src_filters,
|
||||
pattern,
|
||||
flags,
|
||||
severity,
|
||||
silent,
|
||||
verbose,
|
||||
json_output,
|
||||
fail_on_defect,
|
||||
skip_packages,
|
||||
):
|
||||
app.set_session_var("custom_project_conf", project_conf)
|
||||
|
||||
# find project directory on upper level
|
||||
if os.path.isfile(project_dir):
|
||||
project_dir = find_project_dir_above(project_dir)
|
||||
|
||||
results = []
|
||||
with fs.cd(project_dir):
|
||||
config = ProjectConfig.get_instance(project_conf)
|
||||
config.validate(environment)
|
||||
|
||||
default_envs = config.default_envs()
|
||||
for envname in config.envs():
|
||||
skipenv = any(
|
||||
[
|
||||
environment and envname not in environment,
|
||||
not environment and default_envs and envname not in default_envs,
|
||||
]
|
||||
)
|
||||
|
||||
env_options = config.items(env=envname, as_dict=True)
|
||||
env_dump = []
|
||||
for k, v in env_options.items():
|
||||
if k not in ("platform", "framework", "board"):
|
||||
continue
|
||||
env_dump.append(
|
||||
"%s: %s" % (k, ", ".join(v) if isinstance(v, list) else v)
|
||||
)
|
||||
|
||||
default_src_filters = []
|
||||
for d in (
|
||||
config.get("platformio", "src_dir"),
|
||||
config.get("platformio", "include_dir"),
|
||||
):
|
||||
try:
|
||||
default_src_filters.append("+<%s>" % os.path.relpath(d))
|
||||
except ValueError as exc:
|
||||
# On Windows if sources are located on a different logical drive
|
||||
if not json_output and not silent:
|
||||
click.echo(
|
||||
"Error: Project cannot be analyzed! The project folder `%s`"
|
||||
" is located on a different logical drive\n" % d
|
||||
)
|
||||
raise exception.ReturnErrorCode(1) from exc
|
||||
|
||||
env_src_filters = (
|
||||
src_filters
|
||||
or pattern
|
||||
or env_options.get(
|
||||
"check_src_filters",
|
||||
env_options.get("check_patterns", default_src_filters),
|
||||
)
|
||||
)
|
||||
|
||||
tool_options = dict(
|
||||
verbose=verbose,
|
||||
silent=silent,
|
||||
src_filters=env_src_filters,
|
||||
flags=flags or env_options.get("check_flags"),
|
||||
severity=(
|
||||
[DefectItem.SEVERITY_LABELS[DefectItem.SEVERITY_HIGH]]
|
||||
if silent
|
||||
else severity or config.get("env:" + envname, "check_severity")
|
||||
),
|
||||
skip_packages=skip_packages or env_options.get("check_skip_packages"),
|
||||
platform_packages=env_options.get("platform_packages"),
|
||||
)
|
||||
|
||||
for tool in config.get("env:" + envname, "check_tool"):
|
||||
if skipenv:
|
||||
results.append({"env": envname, "tool": tool})
|
||||
continue
|
||||
if not silent and not json_output:
|
||||
print_processing_header(tool, envname, env_dump)
|
||||
|
||||
ct = CheckToolFactory.new(
|
||||
tool, os.getcwd(), config, envname, tool_options
|
||||
)
|
||||
|
||||
result = {"env": envname, "tool": tool, "duration": time()}
|
||||
rc = ct.check(
|
||||
on_defect_callback=(
|
||||
None
|
||||
if (json_output or verbose)
|
||||
else lambda defect: click.echo(repr(defect))
|
||||
)
|
||||
)
|
||||
|
||||
result["defects"] = ct.get_defects()
|
||||
result["duration"] = time() - result["duration"]
|
||||
|
||||
result["succeeded"] = rc == 0
|
||||
if fail_on_defect:
|
||||
result["succeeded"] = rc == 0 and not any(
|
||||
DefectItem.SEVERITY_LABELS[d.severity] in fail_on_defect
|
||||
for d in result["defects"]
|
||||
)
|
||||
result["stats"] = collect_component_stats(result)
|
||||
results.append(result)
|
||||
|
||||
if verbose:
|
||||
click.echo("\n".join(repr(d) for d in result["defects"]))
|
||||
|
||||
if not json_output and not silent:
|
||||
if rc != 0:
|
||||
click.echo(
|
||||
"Error: %s failed to perform check! Please "
|
||||
"examine tool output in verbose mode." % tool
|
||||
)
|
||||
elif not result["defects"]:
|
||||
click.echo("No defects found")
|
||||
print_processing_footer(result)
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(results_to_json(results)))
|
||||
elif not silent:
|
||||
print_check_summary(results, verbose=verbose)
|
||||
|
||||
# Reset custom project config
|
||||
app.set_session_var("custom_project_conf", None)
|
||||
|
||||
command_failed = any(r.get("succeeded") is False for r in results)
|
||||
if command_failed:
|
||||
raise exception.ReturnErrorCode(1)
|
||||
|
||||
|
||||
def results_to_json(raw):
|
||||
results = []
|
||||
for item in raw:
|
||||
if item.get("succeeded") is None:
|
||||
continue
|
||||
item.update(
|
||||
{
|
||||
"succeeded": bool(item.get("succeeded")),
|
||||
"defects": [d.as_dict() for d in item.get("defects", [])],
|
||||
}
|
||||
)
|
||||
results.append(item)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def print_processing_header(tool, envname, envdump):
|
||||
click.echo(
|
||||
"Checking %s > %s (%s)"
|
||||
% (click.style(envname, fg="cyan", bold=True), tool, "; ".join(envdump))
|
||||
)
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
click.secho("-" * terminal_width, bold=True)
|
||||
|
||||
|
||||
def print_processing_footer(result):
|
||||
is_failed = not result.get("succeeded")
|
||||
util.print_labeled_bar(
|
||||
"[%s] Took %.2f seconds"
|
||||
% (
|
||||
(
|
||||
click.style("FAILED", fg="red", bold=True)
|
||||
if is_failed
|
||||
else click.style("PASSED", fg="green", bold=True)
|
||||
),
|
||||
result["duration"],
|
||||
),
|
||||
is_error=is_failed,
|
||||
)
|
||||
|
||||
|
||||
def collect_component_stats(result):
|
||||
components = {}
|
||||
|
||||
def _append_defect(component, defect):
|
||||
if not components.get(component):
|
||||
components[component] = Counter()
|
||||
components[component].update({DefectItem.SEVERITY_LABELS[defect.severity]: 1})
|
||||
|
||||
for defect in result.get("defects", []):
|
||||
component = os.path.dirname(defect.file) or defect.file
|
||||
_append_defect(component, defect)
|
||||
|
||||
if component.lower().startswith(get_project_dir().lower()):
|
||||
while os.sep in component:
|
||||
component = os.path.dirname(component)
|
||||
_append_defect(component, defect)
|
||||
|
||||
return components
|
||||
|
||||
|
||||
def print_defects_stats(results):
|
||||
if not results:
|
||||
return
|
||||
|
||||
component_stats = {}
|
||||
for r in results:
|
||||
for k, v in r.get("stats", {}).items():
|
||||
if not component_stats.get(k):
|
||||
component_stats[k] = Counter()
|
||||
component_stats[k].update(r["stats"][k])
|
||||
|
||||
if not component_stats:
|
||||
return
|
||||
|
||||
severity_labels = list(DefectItem.SEVERITY_LABELS.values())
|
||||
severity_labels.reverse()
|
||||
tabular_data = []
|
||||
for k, v in component_stats.items():
|
||||
tool_defect = [v.get(s, 0) for s in severity_labels]
|
||||
tabular_data.append([k] + tool_defect)
|
||||
|
||||
total = ["Total"] + [sum(d) for d in list(zip(*tabular_data))[1:]]
|
||||
tabular_data.sort()
|
||||
tabular_data.append([]) # Empty line as delimiter
|
||||
tabular_data.append(total)
|
||||
|
||||
headers = ["Component"]
|
||||
headers.extend([label.upper() for label in severity_labels])
|
||||
headers = [click.style(h, bold=True) for h in headers]
|
||||
click.echo(tabulate(tabular_data, headers=headers, numalign="center"))
|
||||
click.echo()
|
||||
|
||||
|
||||
def print_check_summary(results, verbose=False):
|
||||
click.echo()
|
||||
|
||||
tabular_data = []
|
||||
succeeded_nums = 0
|
||||
failed_nums = 0
|
||||
duration = 0
|
||||
|
||||
print_defects_stats(results)
|
||||
|
||||
for result in results:
|
||||
duration += result.get("duration", 0)
|
||||
if result.get("succeeded") is False:
|
||||
failed_nums += 1
|
||||
status_str = click.style("FAILED", fg="red")
|
||||
elif result.get("succeeded") is None:
|
||||
status_str = "IGNORED"
|
||||
if not verbose:
|
||||
continue
|
||||
else:
|
||||
succeeded_nums += 1
|
||||
status_str = click.style("PASSED", fg="green")
|
||||
|
||||
tabular_data.append(
|
||||
(
|
||||
click.style(result["env"], fg="cyan"),
|
||||
result["tool"],
|
||||
status_str,
|
||||
util.humanize_duration_time(result.get("duration")),
|
||||
)
|
||||
)
|
||||
|
||||
click.echo(
|
||||
tabulate(
|
||||
tabular_data,
|
||||
headers=[
|
||||
click.style(s, bold=True)
|
||||
for s in ("Environment", "Tool", "Status", "Duration")
|
||||
],
|
||||
),
|
||||
err=failed_nums,
|
||||
)
|
||||
|
||||
util.print_labeled_bar(
|
||||
"%s%d succeeded in %s"
|
||||
% (
|
||||
"%d failed, " % failed_nums if failed_nums else "",
|
||||
succeeded_nums,
|
||||
util.humanize_duration_time(duration),
|
||||
),
|
||||
is_error=failed_nums,
|
||||
fg="red" if failed_nums else "green",
|
||||
)
|
platformio/check/defect.py (new file, 95 lines)
@@ -0,0 +1,95 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import click

from platformio.exception import PlatformioException
from platformio.project.helpers import get_project_dir

# pylint: disable=too-many-instance-attributes, redefined-builtin
# pylint: disable=too-many-arguments


class DefectItem:
    SEVERITY_HIGH = 1
    SEVERITY_MEDIUM = 2
    SEVERITY_LOW = 4
    SEVERITY_LABELS = {4: "low", 2: "medium", 1: "high"}

    def __init__(  # pylint: disable=too-many-positional-arguments
        self,
        severity,
        category,
        message,
        file=None,
        line=0,
        column=0,
        id=None,
        callstack=None,
        cwe=None,
    ):
        assert severity in (self.SEVERITY_HIGH, self.SEVERITY_MEDIUM, self.SEVERITY_LOW)
        self.severity = severity
        self.category = category
        self.message = message
        self.line = int(line)
        self.column = int(column)
        self.callstack = callstack
        self.cwe = cwe
        self.id = id
        self.file = file or "unknown"
        if file.lower().startswith(get_project_dir().lower()):
            self.file = os.path.relpath(file, get_project_dir())

    def __repr__(self):
        defect_color = None
        if self.severity == self.SEVERITY_HIGH:
            defect_color = "red"
        elif self.severity == self.SEVERITY_MEDIUM:
            defect_color = "yellow"

        format_str = "{file}:{line}: [{severity}:{category}] {message} {id}"
        return format_str.format(
            severity=click.style(self.SEVERITY_LABELS[self.severity], fg=defect_color),
            category=click.style(self.category.lower(), fg=defect_color),
            file=click.style(self.file, bold=True),
            message=self.message,
            line=self.line,
            id="%s" % "[%s]" % self.id if self.id else "",
        )

    def __or__(self, defect):
        return self.severity | defect.severity

    @staticmethod
    def severity_to_int(label):
        for key, value in DefectItem.SEVERITY_LABELS.items():
            if label == value:
                return key
        raise PlatformioException("Unknown severity label -> %s" % label)

    def as_dict(self):
        return {
            "severity": self.SEVERITY_LABELS[self.severity],
            "category": self.category,
            "message": self.message,
            "file": os.path.abspath(self.file),
            "line": self.line,
            "column": self.column,
            "callstack": self.callstack,
            "id": self.id,
            "cwe": self.cwe,
        }
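Usage sketch (not part of the diff): a check tool adapter builds DefectItem objects from parser output and the CLI renders them via `repr()` or `as_dict()`. File path, message, and defect id below are invented.

# Hypothetical defect constructed the way a tool adapter might.
from platformio.check.defect import DefectItem

defect = DefectItem(
    severity=DefectItem.severity_to_int("medium"),  # -> 2
    category="style",
    message="Variable 'tmp' is assigned but never used",
    file="src/main.cpp",
    line=42,
    column=9,
    id="unusedVariable",
)
print(repr(defect))                   # src/main.cpp:42: [medium:style] ... [unusedVariable]
print(defect.as_dict()["severity"])   # "medium"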
platformio/check/tools/__init__.py (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from platformio import exception
|
||||
from platformio.check.tools.clangtidy import ClangtidyCheckTool
|
||||
from platformio.check.tools.cppcheck import CppcheckCheckTool
|
||||
from platformio.check.tools.pvsstudio import PvsStudioCheckTool
|
||||
|
||||
|
||||
class CheckToolFactory:
|
||||
@staticmethod
|
||||
def new(tool, project_dir, config, envname, options):
|
||||
cls = None
|
||||
if tool == "cppcheck":
|
||||
cls = CppcheckCheckTool
|
||||
elif tool == "clangtidy":
|
||||
cls = ClangtidyCheckTool
|
||||
elif tool == "pvs-studio":
|
||||
cls = PvsStudioCheckTool
|
||||
else:
|
||||
raise exception.PlatformioException("Unknown check tool `%s`" % tool)
|
||||
return cls(project_dir, config, envname, options)
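Roughly how the factory above is meant to be driven; the project path, environment name, and options values are hypothetical, and the options keys mirror what the tool classes read in this changeset:

from platformio.check.tools import CheckToolFactory
from platformio.project.config import ProjectConfig

config = ProjectConfig("/path/to/project/platformio.ini")
tool = CheckToolFactory.new(
    "cppcheck",            # or "clangtidy" / "pvs-studio"
    "/path/to/project",    # project_dir
    config,                # ProjectConfig for that project
    "uno",                 # environment name from platformio.ini
    dict(severity=["high", "medium"], flags=[], src_filters="+<src/>",
         skip_packages=True, platform_packages=None, verbose=False),
)
failed = tool.check(on_defect_callback=lambda defect: print(defect))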
|
267
platformio/check/tools/base.py
Normal file
@ -0,0 +1,267 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import fs, proc
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.package.manager.core import get_core_package_dir
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.project.helpers import load_build_metadata
|
||||
|
||||
|
||||
class CheckToolBase: # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, project_dir, config, envname, options):
|
||||
self.config = config
|
||||
self.envname = envname
|
||||
self.options = options
|
||||
self.project_dir = project_dir
|
||||
self.cc_flags = []
|
||||
self.cxx_flags = []
|
||||
self.cpp_includes = []
|
||||
self.cpp_defines = []
|
||||
self.toolchain_defines = []
|
||||
self._tmp_files = []
|
||||
self.cc_path = None
|
||||
self.cxx_path = None
|
||||
self._defects = []
|
||||
self._on_defect_callback = None
|
||||
self._bad_input = False
|
||||
self._load_cpp_data()
|
||||
|
||||
# detect all defects by default
|
||||
if not self.options.get("severity"):
|
||||
self.options["severity"] = [
|
||||
DefectItem.SEVERITY_LOW,
|
||||
DefectItem.SEVERITY_MEDIUM,
|
||||
DefectItem.SEVERITY_HIGH,
|
||||
]
|
||||
# normalize severity labels to their integer IDs
|
||||
self.options["severity"] = [
|
||||
s if isinstance(s, int) else DefectItem.severity_to_int(s)
|
||||
for s in self.options["severity"]
|
||||
]
|
||||
|
||||
def _load_cpp_data(self):
|
||||
data = load_build_metadata(self.project_dir, self.envname)
|
||||
if not data:
|
||||
return
|
||||
self.cc_flags = data.get("cc_flags", [])
|
||||
self.cxx_flags = data.get("cxx_flags", [])
|
||||
self.cpp_includes = self._dump_includes(data.get("includes", {}))
|
||||
self.cpp_defines = data.get("defines", [])
|
||||
self.cc_path = data.get("cc_path")
|
||||
self.cxx_path = data.get("cxx_path")
|
||||
self.toolchain_defines = self._get_toolchain_defines()
|
||||
|
||||
def get_tool_dir(self, pkg_name):
|
||||
for spec in self.options["platform_packages"] or []:
|
||||
spec = PackageSpec(spec)
|
||||
if spec.name == pkg_name:
|
||||
return get_core_package_dir(pkg_name, spec=spec)
|
||||
return get_core_package_dir(pkg_name)
|
||||
|
||||
def get_flags(self, tool):
|
||||
result = []
|
||||
flags = self.options.get("flags") or []
|
||||
for flag in flags:
|
||||
if ":" not in flag or flag.startswith("-"):
|
||||
result.extend([f for f in flag.split(" ") if f])
|
||||
elif flag.startswith("%s:" % tool):
|
||||
result.extend([f for f in flag.split(":", 1)[1].split(" ") if f])
|
||||
|
||||
return result
|
||||
|
||||
def _get_toolchain_defines(self):
|
||||
def _extract_defines(language, includes_file):
|
||||
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
|
||||
defines = []
|
||||
cmd = "echo | %s -x %s %s %s -dM -E -" % (
|
||||
self.cc_path,
|
||||
language,
|
||||
" ".join(
|
||||
[f for f in build_flags if f.startswith(("-m", "-f", "-std"))]
|
||||
),
|
||||
includes_file,
|
||||
)
|
||||
result = proc.exec_command(cmd, shell=True)
|
||||
|
||||
if result["returncode"] != 0:
|
||||
click.echo("Warning: Failed to extract toolchain defines!")
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
|
||||
for line in result["out"].split("\n"):
|
||||
tokens = line.strip().split(" ", 2)
|
||||
if not tokens or tokens[0] != "#define":
|
||||
continue
|
||||
if len(tokens) > 2:
|
||||
defines.append("%s=%s" % (tokens[1], tokens[2]))
|
||||
else:
|
||||
defines.append(tokens[1])
|
||||
|
||||
return defines
|
||||
|
||||
incflags_file = self._long_includes_hook(self.cpp_includes)
|
||||
return {lang: _extract_defines(lang, incflags_file) for lang in ("c", "c++")}
|
||||
|
||||
def _create_tmp_file(self, data):
|
||||
with tempfile.NamedTemporaryFile("w", delete=False) as fp:
|
||||
fp.write(data)
|
||||
self._tmp_files.append(fp.name)
|
||||
return fp.name
|
||||
|
||||
def _long_includes_hook(self, includes):
|
||||
data = []
|
||||
for inc in includes:
|
||||
data.append('-I"%s"' % fs.to_unix_path(inc))
|
||||
|
||||
return '@"%s"' % self._create_tmp_file(" ".join(data))
|
||||
|
||||
@staticmethod
|
||||
def _dump_includes(includes_map):
|
||||
result = []
|
||||
for includes in includes_map.values():
|
||||
for include in includes:
|
||||
if include not in result:
|
||||
result.append(include)
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def is_flag_set(flag, flags):
|
||||
return any(flag in f for f in flags)
|
||||
|
||||
def get_defects(self):
|
||||
return self._defects
|
||||
|
||||
def configure_command(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def on_tool_output(self, line):
|
||||
line = self.tool_output_filter(line)
|
||||
if not line:
|
||||
return
|
||||
|
||||
defect = self.parse_defect(line)
|
||||
|
||||
if not isinstance(defect, DefectItem):
|
||||
if self.options.get("verbose"):
|
||||
click.echo(line)
|
||||
return
|
||||
|
||||
if defect.severity not in self.options["severity"]:
|
||||
return
|
||||
|
||||
self._defects.append(defect)
|
||||
if self._on_defect_callback:
|
||||
self._on_defect_callback(defect)
|
||||
|
||||
@staticmethod
|
||||
def tool_output_filter(line):
|
||||
return line
|
||||
|
||||
@staticmethod
|
||||
def parse_defect(raw_line):
|
||||
return raw_line
|
||||
|
||||
def clean_up(self):
|
||||
for f in self._tmp_files:
|
||||
if os.path.isfile(f):
|
||||
os.remove(f)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return cmd_result["returncode"] == 0
|
||||
|
||||
def execute_check_cmd(self, cmd):
|
||||
result = proc.exec_command(
|
||||
cmd,
|
||||
stdout=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
stderr=proc.LineBufferedAsyncPipe(self.on_tool_output),
|
||||
)
|
||||
|
||||
if not self.is_check_successful(result):
|
||||
click.echo(
|
||||
"\nError: Failed to execute check command! Exited with code %d."
|
||||
% result["returncode"]
|
||||
)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(result["out"])
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def get_project_target_files(project_dir, src_filters):
|
||||
c_extension = (".c",)
|
||||
cpp_extensions = (".cc", ".cpp", ".cxx", ".ino")
|
||||
header_extensions = (".h", ".hh", ".hpp", ".hxx")
|
||||
|
||||
result = {"c": [], "c++": [], "headers": []}
|
||||
|
||||
def _add_file(path):
|
||||
if path.endswith(header_extensions):
|
||||
result["headers"].append(os.path.abspath(path))
|
||||
elif path.endswith(c_extension):
|
||||
result["c"].append(os.path.abspath(path))
|
||||
elif path.endswith(cpp_extensions):
|
||||
result["c++"].append(os.path.abspath(path))
|
||||
|
||||
src_filters = normalize_src_filters(src_filters)
|
||||
for f in fs.match_src_files(project_dir, src_filters):
|
||||
_add_file(f)
|
||||
|
||||
return result
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
cmd = self.configure_command()
|
||||
if cmd:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
else:
|
||||
if self.options.get("verbose"):
|
||||
click.echo("Error: Couldn't configure command")
|
||||
self._bad_input = True
|
||||
|
||||
self.clean_up()
|
||||
|
||||
return self._bad_input
|
||||
|
||||
|
||||
#
|
||||
# Helpers
|
||||
#
|
||||
|
||||
|
||||
def normalize_src_filters(src_filters):
|
||||
def _normalize(src_filters):
|
||||
return (
|
||||
src_filters
|
||||
if src_filters.startswith(("+<", "-<"))
|
||||
else "+<%s>" % src_filters
|
||||
)
|
||||
|
||||
if isinstance(src_filters, (list, tuple)):
|
||||
return " ".join([_normalize(f) for f in src_filters])
|
||||
|
||||
return _normalize(src_filters)
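Two small illustrations of the helpers above, based only on the behavior visible in this file (the flag values are hypothetical):

from platformio.check.tools.base import normalize_src_filters

# Plain paths are wrapped into "+<...>" filters; explicit filters pass through untouched.
print(normalize_src_filters("src"))                   # "+<src>"
print(normalize_src_filters(["+<src/>", "-<lib/>"]))  # "+<src/> -<lib/>"

# CheckToolBase.get_flags() splits user-supplied flags per tool: an entry such as
# "cppcheck: --std=c++17" is returned only when tool == "cppcheck", while bare
# entries like "-DDEBUG=1" are returned for every tool.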
|
90
platformio/check/tools/clangtidy.py
Normal file
@ -0,0 +1,90 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
from os.path import join
|
||||
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools.base import CheckToolBase
|
||||
|
||||
|
||||
class ClangtidyCheckTool(CheckToolBase):
|
||||
def tool_output_filter(self, line): # pylint: disable=arguments-differ
|
||||
if not self.options.get("verbose") and "[clang-diagnostic-error]" in line:
|
||||
return ""
|
||||
|
||||
if "[CommonOptionsParser]" in line:
|
||||
self._bad_input = True
|
||||
return line
|
||||
|
||||
if any(d in line for d in ("note: ", "error: ", "warning: ")):
|
||||
return line
|
||||
|
||||
return ""
|
||||
|
||||
def parse_defect(self, raw_line): # pylint: disable=arguments-differ
|
||||
match = re.match(r"^(.*):(\d+):(\d+):\s+([^:]+):\s(.+)\[([^]]+)\]$", raw_line)
|
||||
if not match:
|
||||
return raw_line
|
||||
|
||||
file_, line, column, category, message, defect_id = match.groups()
|
||||
|
||||
severity = DefectItem.SEVERITY_LOW
|
||||
if category == "error":
|
||||
severity = DefectItem.SEVERITY_HIGH
|
||||
elif category == "warning":
|
||||
severity = DefectItem.SEVERITY_MEDIUM
|
||||
|
||||
return DefectItem(severity, category, message, file_, line, column, defect_id)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
# Note: Clang-Tidy returns 1 for non-critical compilation errors,
# so 0 and 1 are the only acceptable exit codes
|
||||
return cmd_result["returncode"] < 2
|
||||
|
||||
def configure_command(self):
|
||||
tool_path = join(self.get_tool_dir("tool-clangtidy"), "clang-tidy")
|
||||
|
||||
cmd = [tool_path, "--quiet"]
|
||||
flags = self.get_flags("clangtidy")
|
||||
if not (
|
||||
self.is_flag_set("--checks", flags) or self.is_flag_set("--config", flags)
|
||||
):
|
||||
cmd.append("--checks=*")
|
||||
|
||||
project_files = self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
)
|
||||
|
||||
src_files = []
|
||||
for items in project_files.values():
|
||||
src_files.extend(items)
|
||||
|
||||
cmd.extend(flags + src_files + ["--"])
|
||||
cmd.extend(
|
||||
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines["c++"]]
|
||||
)
|
||||
|
||||
includes = []
|
||||
for inc in self.cpp_includes:
|
||||
if self.options.get("skip_packages") and inc.lower().startswith(
|
||||
self.config.get("platformio", "packages_dir").lower()
|
||||
):
|
||||
continue
|
||||
includes.append(inc)
|
||||
|
||||
cmd.extend(["-I%s" % inc for inc in includes])
|
||||
|
||||
return cmd
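For reference, the kind of diagnostic line the regex in parse_defect() above is written for; the sample is a typical clang-tidy message, not output from a real run, and `tool` stands for an already configured ClangtidyCheckTool instance:

from platformio.check.defect import DefectItem

raw = ("src/main.cpp:10:5: warning: variable 'x' is not initialized "
       "[cppcoreguidelines-init-variables]")

defect = tool.parse_defect(raw)  # tool: a configured ClangtidyCheckTool (assumed)
assert defect.severity == DefectItem.SEVERITY_MEDIUM   # category "warning"
assert defect.id == "cppcoreguidelines-init-variables"
assert (defect.line, defect.column) == (10, 5)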
|
271
platformio/check/tools/cppcheck.py
Normal file
@ -0,0 +1,271 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio import proc
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools.base import CheckToolBase
|
||||
|
||||
|
||||
class CppcheckCheckTool(CheckToolBase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._field_delimiter = "<&PIO&>"
|
||||
self._buffer = ""
|
||||
self.defect_fields = [
|
||||
"severity",
|
||||
"message",
|
||||
"file",
|
||||
"line",
|
||||
"column",
|
||||
"callstack",
|
||||
"cwe",
|
||||
"id",
|
||||
]
|
||||
|
||||
def tool_output_filter(self, line): # pylint: disable=arguments-differ
|
||||
if (
|
||||
not self.options.get("verbose")
|
||||
and "--suppress=unmatchedSuppression:" in line
|
||||
):
|
||||
return ""
|
||||
|
||||
if any(
|
||||
msg in line
|
||||
for msg in (
|
||||
"No C or C++ source files found",
|
||||
"unrecognized command line option",
|
||||
"there was an internal error",
|
||||
)
|
||||
):
|
||||
self._bad_input = True
|
||||
|
||||
return line
|
||||
|
||||
def parse_defect(self, raw_line): # pylint: disable=arguments-differ
|
||||
if self._field_delimiter not in raw_line:
|
||||
return None
|
||||
|
||||
self._buffer += raw_line
|
||||
if any(f not in self._buffer for f in self.defect_fields):
|
||||
return None
|
||||
|
||||
args = {}
|
||||
for field in self._buffer.split(self._field_delimiter):
|
||||
field = field.strip().replace('"', "")
|
||||
name, value = field.split("=", 1)
|
||||
args[name] = value
|
||||
|
||||
args["category"] = args["severity"]
|
||||
if args["severity"] == "error":
|
||||
args["severity"] = DefectItem.SEVERITY_HIGH
|
||||
elif args["severity"] == "warning":
|
||||
args["severity"] = DefectItem.SEVERITY_MEDIUM
|
||||
else:
|
||||
args["severity"] = DefectItem.SEVERITY_LOW
|
||||
|
||||
# Skip breaking defects found in third-party packages, but keep in mind that such
# defects can abort the checking process, in which case defects from project files are not reported
|
||||
breaking_defect_ids = ("preprocessorErrorDirective", "syntaxError")
|
||||
if (
|
||||
args.get("file", "")
|
||||
.lower()
|
||||
.startswith(self.config.get("platformio", "packages_dir").lower())
|
||||
):
|
||||
if args["id"] in breaking_defect_ids:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(
|
||||
"Error: Found a breaking defect '%s' in %s:%s\n"
|
||||
"Please note: check results might not be valid!\n"
|
||||
"Try adding --skip-packages"
|
||||
% (args.get("message"), args.get("file"), args.get("line"))
|
||||
)
|
||||
click.echo()
|
||||
self._bad_input = True
|
||||
self._buffer = ""
|
||||
return None
|
||||
|
||||
self._buffer = ""
|
||||
return DefectItem(**args)
|
||||
|
||||
def configure_command(self, language, src_file): # pylint: disable=arguments-differ
|
||||
tool_path = os.path.join(self.get_tool_dir("tool-cppcheck"), "cppcheck")
|
||||
|
||||
cmd = [
|
||||
tool_path,
|
||||
"--addon-python=%s" % proc.get_pythonexe_path(),
|
||||
"--error-exitcode=3",
|
||||
"--verbose" if self.options.get("verbose") else "--quiet",
|
||||
]
|
||||
|
||||
cmd.append(
|
||||
'--template="%s"'
|
||||
% self._field_delimiter.join(
|
||||
["{0}={{{0}}}".format(f) for f in self.defect_fields]
|
||||
)
|
||||
)
|
||||
|
||||
flags = self.get_flags("cppcheck")
|
||||
if not flags:
|
||||
# By default, let users suppress individual defects
# directly in code with "// cppcheck-suppress warningID"
|
||||
cmd.append("--inline-suppr")
|
||||
if not self.is_flag_set("--platform", flags):
|
||||
cmd.append("--platform=unspecified")
|
||||
if not self.is_flag_set("--enable", flags):
|
||||
enabled_checks = [
|
||||
"warning",
|
||||
"style",
|
||||
"performance",
|
||||
"portability",
|
||||
"unusedFunction",
|
||||
]
|
||||
cmd.append("--enable=%s" % ",".join(enabled_checks))
|
||||
|
||||
if not self.is_flag_set("--language", flags):
|
||||
cmd.append("--language=" + language)
|
||||
|
||||
build_flags = self.cxx_flags if language == "c++" else self.cc_flags
|
||||
|
||||
if not self.is_flag_set("--std", flags):
|
||||
# Try to guess the standard version from the build flags
|
||||
for flag in build_flags:
|
||||
if "-std" in flag:
|
||||
cmd.append("-" + self.convert_language_standard(flag))
|
||||
|
||||
cmd.extend(
|
||||
["-D%s" % d for d in self.cpp_defines + self.toolchain_defines[language]]
|
||||
)
|
||||
|
||||
cmd.extend(flags)
|
||||
|
||||
cmd.extend(
|
||||
"--include=" + inc
|
||||
for inc in self.get_forced_includes(build_flags, self.cpp_includes)
|
||||
)
|
||||
cmd.append("--includes-file=%s" % self._generate_inc_file())
|
||||
cmd.append('"%s"' % src_file)
|
||||
|
||||
return cmd
|
||||
|
||||
@staticmethod
|
||||
def get_forced_includes(build_flags, includes):
|
||||
def _extract_filepath(flag, include_options, build_flags):
|
||||
path = ""
|
||||
for option in include_options:
|
||||
if not flag.startswith(option):
|
||||
continue
|
||||
if flag.split(option)[1].strip():
|
||||
path = flag.split(option)[1].strip()
|
||||
elif build_flags.index(flag) + 1 < len(build_flags):
|
||||
path = build_flags[build_flags.index(flag) + 1]
|
||||
return path
|
||||
|
||||
def _search_include_dir(filepath, include_paths):
|
||||
for inc_path in include_paths:
|
||||
path = os.path.join(inc_path, filepath)
|
||||
if os.path.isfile(path):
|
||||
return path
|
||||
return ""
|
||||
|
||||
result = []
|
||||
include_options = ("-include", "-imacros")
|
||||
for f in build_flags:
|
||||
if f.startswith(include_options):
|
||||
filepath = _extract_filepath(f, include_options, build_flags)
|
||||
if not os.path.isabs(filepath):
|
||||
filepath = _search_include_dir(filepath, includes)
|
||||
if os.path.isfile(filepath):
|
||||
result.append(filepath)
|
||||
|
||||
return result
|
||||
|
||||
def _generate_src_file(self, src_files):
|
||||
return self._create_tmp_file("\n".join(src_files))
|
||||
|
||||
def _generate_inc_file(self):
|
||||
result = []
|
||||
for inc in self.cpp_includes:
|
||||
if self.options.get("skip_packages") and inc.lower().startswith(
|
||||
self.config.get("platformio", "packages_dir").lower()
|
||||
):
|
||||
continue
|
||||
result.append(inc)
|
||||
return self._create_tmp_file("\n".join(result))
|
||||
|
||||
def clean_up(self):
|
||||
super().clean_up()
|
||||
|
||||
# delete temporary dump files generated by addons
|
||||
if not self.is_flag_set("--addon", self.get_flags("cppcheck")):
|
||||
return
|
||||
|
||||
for files in self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).values():
|
||||
for f in files:
|
||||
dump_file = f + ".dump"
|
||||
if os.path.isfile(dump_file):
|
||||
os.remove(dump_file)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
# Cppcheck is configured to return '3' if a defect is found
|
||||
return cmd_result["returncode"] in (0, 3)
|
||||
|
||||
@staticmethod
|
||||
def convert_language_standard(flag):
|
||||
cpp_standards_map = {
|
||||
"0x": "11",
|
||||
"1y": "14",
|
||||
"1z": "17",
|
||||
"2a": "20",
|
||||
}
|
||||
|
||||
standard = flag[-2:]
|
||||
# Note: GNU extensions are not supported and are converted to the corresponding ISO standards
|
||||
return flag.replace("gnu", "c").replace(
|
||||
standard, cpp_standards_map.get(standard, standard)
|
||||
)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
|
||||
project_files = self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
)
|
||||
src_files_scope = ("c", "c++")
|
||||
if not any(project_files[t] for t in src_files_scope):
|
||||
click.echo("Error: Nothing to check.")
|
||||
return True
|
||||
|
||||
for scope, files in project_files.items():
|
||||
if scope not in src_files_scope:
|
||||
continue
|
||||
for src_file in files:
|
||||
cmd = self.configure_command(scope, src_file)
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
continue
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
|
||||
self.execute_check_cmd(cmd)
|
||||
|
||||
self.clean_up()
|
||||
|
||||
return self._bad_input
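The tool above asks cppcheck for machine-readable output by joining the defect fields with the "<&PIO&>" delimiter via --template; a rough sketch of the generated template and of an output chunk parse_defect() can consume (the defect values are made up):

fields = ["severity", "message", "file", "line", "column", "callstack", "cwe", "id"]
template = "<&PIO&>".join("{0}={{{0}}}".format(f) for f in fields)
# -> "severity={severity}<&PIO&>message={message}<&PIO&>file={file}<&PIO&>..."

sample = ("severity=warning<&PIO&>message=Uninitialized variable: x<&PIO&>"
          "file=src/main.cpp<&PIO&>line=12<&PIO&>column=9<&PIO&>"
          "callstack=<&PIO&>cwe=457<&PIO&>id=uninitvar")
# parse_defect() buffers such chunks until every field is present, splits on the
# delimiter, maps "warning" -> SEVERITY_MEDIUM and builds DefectItem(**args).

Likewise, convert_language_standard("-std=gnu++0x") yields "-std=c++11", which configure_command() turns into cppcheck's "--std=c++11" by prepending the extra dash.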
|
251
platformio/check/tools/pvsstudio.py
Normal file
@ -0,0 +1,251 @@
|
||||
# Copyright (c) 2020-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from xml.etree.ElementTree import fromstring
|
||||
|
||||
import click
|
||||
|
||||
from platformio import proc
|
||||
from platformio.check.defect import DefectItem
|
||||
from platformio.check.tools.base import CheckToolBase
|
||||
from platformio.compat import IS_WINDOWS
|
||||
|
||||
|
||||
class PvsStudioCheckTool(CheckToolBase): # pylint: disable=too-many-instance-attributes
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._tmp_dir = tempfile.mkdtemp(prefix="piocheck")
|
||||
self._tmp_preprocessed_file = self._generate_tmp_file_path() + ".i"
|
||||
self._tmp_output_file = self._generate_tmp_file_path() + ".pvs"
|
||||
self._tmp_cfg_file = self._generate_tmp_file_path() + ".cfg"
|
||||
self._tmp_cmd_file = self._generate_tmp_file_path() + ".cmd"
|
||||
self.tool_path = os.path.join(
|
||||
self.get_tool_dir("tool-pvs-studio"),
|
||||
"x64" if IS_WINDOWS else "bin",
|
||||
"pvs-studio",
|
||||
)
|
||||
|
||||
with open(self._tmp_cfg_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
"exclude-path = "
|
||||
+ self.config.get("platformio", "packages_dir").replace("\\", "/")
|
||||
)
|
||||
|
||||
with open(self._tmp_cmd_file, mode="w", encoding="utf8") as fp:
|
||||
fp.write(
|
||||
" ".join(
|
||||
['-I"%s"' % inc.replace("\\", "/") for inc in self.cpp_includes]
|
||||
)
|
||||
)
|
||||
|
||||
def tool_output_filter(self, line): # pylint: disable=arguments-differ
|
||||
if any(
|
||||
err_msg in line.lower()
|
||||
for err_msg in (
|
||||
"license was not entered",
|
||||
"license information is incorrect",
|
||||
)
|
||||
):
|
||||
self._bad_input = True
|
||||
return line
|
||||
|
||||
def _process_defects(self, defects):
|
||||
for defect in defects:
|
||||
if not isinstance(defect, DefectItem):
|
||||
return
|
||||
if defect.severity not in self.options["severity"]:
continue
|
||||
self._defects.append(defect)
|
||||
if self._on_defect_callback:
|
||||
self._on_defect_callback(defect)
|
||||
|
||||
def _demangle_report(self, output_file):
|
||||
converter_tool = os.path.join(
|
||||
self.get_tool_dir("tool-pvs-studio"),
|
||||
"HtmlGenerator" if IS_WINDOWS else os.path.join("bin", "plog-converter"),
|
||||
)
|
||||
|
||||
cmd = (
|
||||
converter_tool,
|
||||
"-t",
|
||||
"xml",
|
||||
output_file,
|
||||
"-m",
|
||||
"cwe",
|
||||
"-m",
|
||||
"misra",
|
||||
"-a",
|
||||
# Enable all possible analyzers and defect levels
|
||||
"GA:1,2,3;64:1,2,3;OP:1,2,3;CS:1,2,3;MISRA:1,2,3",
|
||||
"--cerr",
|
||||
)
|
||||
|
||||
result = proc.exec_command(cmd)
|
||||
if result["returncode"] != 0:
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
return result["err"]
|
||||
|
||||
def parse_defects(self, output_file):
|
||||
defects = []
|
||||
|
||||
report = self._demangle_report(output_file)
|
||||
if not report:
|
||||
self._bad_input = True
|
||||
return []
|
||||
|
||||
try:
|
||||
defects_data = fromstring(report)
|
||||
except: # pylint: disable=bare-except
|
||||
click.echo("Error: Couldn't decode generated report!")
|
||||
self._bad_input = True
|
||||
return []
|
||||
|
||||
for table in defects_data.iter("PVS-Studio_Analysis_Log"):
|
||||
message = table.find("Message").text
|
||||
category = table.find("ErrorType").text
|
||||
line = table.find("Line").text
|
||||
file_ = table.find("File").text
|
||||
defect_id = table.find("ErrorCode").text
|
||||
cwe = table.find("CWECode")
|
||||
cwe_id = None
|
||||
if cwe is not None:
|
||||
cwe_id = cwe.text.lower().replace("cwe-", "")
|
||||
misra = table.find("MISRA")
|
||||
if misra is not None:
|
||||
message += " [%s]" % misra.text
|
||||
|
||||
severity = DefectItem.SEVERITY_LOW
|
||||
if category == "error":
|
||||
severity = DefectItem.SEVERITY_HIGH
|
||||
elif category == "warning":
|
||||
severity = DefectItem.SEVERITY_MEDIUM
|
||||
|
||||
defects.append(
|
||||
DefectItem(
|
||||
severity, category, message, file_, line, id=defect_id, cwe=cwe_id
|
||||
)
|
||||
)
|
||||
|
||||
return defects
|
||||
|
||||
def configure_command(self, src_file): # pylint: disable=arguments-differ
|
||||
if os.path.isfile(self._tmp_output_file):
|
||||
os.remove(self._tmp_output_file)
|
||||
|
||||
if not os.path.isfile(self._tmp_preprocessed_file):
|
||||
click.echo("Error: Missing preprocessed file for '%s'" % src_file)
|
||||
return ""
|
||||
|
||||
cmd = [
|
||||
self.tool_path,
|
||||
"--skip-cl-exe",
|
||||
"yes",
|
||||
"--language",
|
||||
"C" if src_file.endswith(".c") else "C++",
|
||||
"--preprocessor",
|
||||
"gcc",
|
||||
"--cfg",
|
||||
self._tmp_cfg_file,
|
||||
"--source-file",
|
||||
src_file,
|
||||
"--i-file",
|
||||
self._tmp_preprocessed_file,
|
||||
"--output-file",
|
||||
self._tmp_output_file,
|
||||
]
|
||||
|
||||
flags = self.get_flags("pvs-studio")
|
||||
if not self.is_flag_set("--platform", flags):
|
||||
cmd.append("--platform=arm")
|
||||
cmd.extend(flags)
|
||||
|
||||
return cmd
|
||||
|
||||
def _generate_tmp_file_path(self):
|
||||
# pylint: disable=protected-access
|
||||
return os.path.join(self._tmp_dir, next(tempfile._get_candidate_names()))
|
||||
|
||||
def _prepare_preprocessed_file(self, src_file):
|
||||
if os.path.isfile(self._tmp_preprocessed_file):
|
||||
os.remove(self._tmp_preprocessed_file)
|
||||
|
||||
flags = self.cxx_flags
|
||||
compiler = self.cxx_path
|
||||
if src_file.endswith(".c"):
|
||||
flags = self.cc_flags
|
||||
compiler = self.cc_path
|
||||
|
||||
cmd = [
|
||||
compiler,
|
||||
'"%s"' % src_file,
|
||||
"-E",
|
||||
"-o",
|
||||
'"%s"' % self._tmp_preprocessed_file,
|
||||
]
|
||||
cmd.extend([f for f in flags if f])
|
||||
cmd.extend(['"-D%s"' % d.replace('"', '\\"') for d in self.cpp_defines])
|
||||
cmd.append('@"%s"' % self._tmp_cmd_file)
|
||||
|
||||
# Explicitly specify C++ as the language used in .ino files
|
||||
if src_file.endswith(".ino"):
|
||||
cmd.insert(1, "-xc++")
|
||||
|
||||
result = proc.exec_command(" ".join(cmd), shell=True)
|
||||
if result["returncode"] != 0 or result["err"]:
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
click.echo(result["err"])
|
||||
self._bad_input = True
|
||||
|
||||
def clean_up(self):
|
||||
super().clean_up()
|
||||
if os.path.isdir(self._tmp_dir):
|
||||
shutil.rmtree(self._tmp_dir)
|
||||
|
||||
@staticmethod
|
||||
def is_check_successful(cmd_result):
|
||||
return (
|
||||
"license" not in cmd_result["err"].lower() and cmd_result["returncode"] == 0
|
||||
)
|
||||
|
||||
def check(self, on_defect_callback=None):
|
||||
self._on_defect_callback = on_defect_callback
|
||||
for scope, files in self.get_project_target_files(
|
||||
self.project_dir, self.options["src_filters"]
|
||||
).items():
|
||||
if scope not in ("c", "c++"):
|
||||
continue
|
||||
for src_file in files:
|
||||
self._prepare_preprocessed_file(src_file)
|
||||
cmd = self.configure_command(src_file)
|
||||
if self.options.get("verbose"):
|
||||
click.echo(" ".join(cmd))
|
||||
if not cmd:
|
||||
self._bad_input = True
|
||||
continue
|
||||
|
||||
result = self.execute_check_cmd(cmd)
|
||||
if result["returncode"] != 0:
|
||||
continue
|
||||
|
||||
self._process_defects(self.parse_defects(self._tmp_output_file))
|
||||
|
||||
self.clean_up()
|
||||
|
||||
return self._bad_input
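parse_defects() above walks the XML produced by plog-converter; a minimal document of the shape it expects looks roughly like this (the root element name and values are invented for illustration, and the real converter output carries more fields):

from xml.etree.ElementTree import fromstring

report = """
<NewDataSet>
  <PVS-Studio_Analysis_Log>
    <ErrorType>warning</ErrorType>
    <ErrorCode>V614</ErrorCode>
    <Message>Uninitialized variable 'x' used.</Message>
    <File>src/main.cpp</File>
    <Line>12</Line>
    <CWECode>CWE-457</CWECode>
  </PVS-Studio_Analysis_Log>
</NewDataSet>
"""
root = fromstring(report)
for table in root.iter("PVS-Studio_Analysis_Log"):
    print(table.find("ErrorCode").text)   # "V614"    -> DefectItem id
    print(table.find("CWECode").text)     # "CWE-457" -> stored as cwe "457"
# category "warning" maps to SEVERITY_MEDIUM, "error" to SEVERITY_HIGH.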
|
110
platformio/cli.py
Normal file
@ -0,0 +1,110 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import importlib
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
|
||||
|
||||
class PlatformioCLI(click.MultiCommand):
|
||||
leftover_args = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._pio_root_path = Path(__file__).parent
|
||||
self._pio_cmd_aliases = dict(package="pkg")
|
||||
|
||||
def _find_pio_commands(self):
|
||||
def _to_module_path(p):
|
||||
return (
|
||||
"platformio." + ".".join(p.relative_to(self._pio_root_path).parts)[:-3]
|
||||
)
|
||||
|
||||
result = {}
|
||||
for p in self._pio_root_path.rglob("cli.py"):
|
||||
# skip this module
|
||||
if p.parent == self._pio_root_path:
|
||||
continue
|
||||
cmd_name = p.parent.name
|
||||
result[self._pio_cmd_aliases.get(cmd_name, cmd_name)] = _to_module_path(p)
|
||||
|
||||
# find legacy commands
|
||||
for p in (self._pio_root_path / "commands").iterdir():
|
||||
if p.name.startswith("_"):
|
||||
continue
|
||||
if (p / "command.py").is_file():
|
||||
result[p.name] = _to_module_path(p / "command.py")
|
||||
elif p.name.endswith(".py"):
|
||||
result[p.name[:-3]] = _to_module_path(p)
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def in_silence():
|
||||
args = PlatformioCLI.leftover_args
|
||||
return args and any(
|
||||
[
|
||||
args[0] == "debug" and "--interpreter" in " ".join(args),
|
||||
args[0] == "upgrade",
|
||||
"--json-output" in args,
|
||||
"--version" in args,
|
||||
]
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def reveal_cmd_path_args(cls, ctx):
|
||||
result = []
|
||||
group = ctx.command
|
||||
args = cls.leftover_args[::]
|
||||
while args:
|
||||
cmd_name = args.pop(0)
|
||||
next_group = group.get_command(ctx, cmd_name)
|
||||
if next_group:
|
||||
group = next_group
|
||||
result.append(cmd_name)
|
||||
if not hasattr(group, "get_command"):
|
||||
break
|
||||
return result
|
||||
|
||||
def invoke(self, ctx):
|
||||
PlatformioCLI.leftover_args = ctx.args
|
||||
if hasattr(ctx, "protected_args"):
|
||||
PlatformioCLI.leftover_args = ctx.protected_args + ctx.args
|
||||
return super().invoke(ctx)
|
||||
|
||||
def list_commands(self, ctx):
|
||||
return sorted(list(self._find_pio_commands()))
|
||||
|
||||
def get_command(self, ctx, cmd_name):
|
||||
commands = self._find_pio_commands()
|
||||
if cmd_name not in commands:
|
||||
return self._handle_obsolate_command(ctx, cmd_name)
|
||||
module = importlib.import_module(commands[cmd_name])
|
||||
return getattr(module, "cli")
|
||||
|
||||
@staticmethod
|
||||
def _handle_obsolate_command(ctx, cmd_name):
|
||||
# pylint: disable=import-outside-toplevel
|
||||
if cmd_name == "init":
|
||||
from platformio.project.commands.init import project_init_cmd
|
||||
|
||||
return project_init_cmd
|
||||
|
||||
if cmd_name == "package":
|
||||
from platformio.package.cli import cli
|
||||
|
||||
return cli
|
||||
|
||||
raise click.UsageError('No such command "%s"' % cmd_name, ctx)
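A sketch of how this MultiCommand is wired into an entry point and how the discovery above maps files to command names; the listed paths are illustrative:

import click
from platformio.cli import PlatformioCLI

@click.command(cls=PlatformioCLI)
def cli():
    """Lazy-loading PlatformIO command group (sketch)."""

if __name__ == "__main__":
    cli()

# Command resolution performed by _find_pio_commands()/get_command():
#   platformio/check/cli.py                      -> "check"  (module "platformio.check.cli")
#   platformio/package/cli.py                    -> "pkg"    (renamed via _pio_cmd_aliases)
#   platformio/commands/boards.py or <name>/command.py -> legacy commands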
|
@ -1,72 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
import sys
|
||||
|
||||
import click
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.group("account", short_help="Manage PIO Account")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
@cli.command("register", short_help="Create new PIO Account")
|
||||
@click.option("-u", "--username")
|
||||
def account_register(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("login", short_help="Log in to PIO Account")
|
||||
@click.option("-u", "--username")
|
||||
@click.option("-p", "--password")
|
||||
def account_login(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("logout", short_help="Log out of PIO Account")
|
||||
def account_logout():
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("password", short_help="Change password")
|
||||
@click.option("--old-password")
|
||||
@click.option("--new-password")
|
||||
def account_password(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("token", short_help="Get or regenerate Authentication Token")
|
||||
@click.option("-p", "--password")
|
||||
@click.option("--regenerate", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_token(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("forgot", short_help="Forgot password")
|
||||
@click.option("-u", "--username")
|
||||
def account_forgot(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
||||
|
||||
|
||||
@cli.command("show", short_help="PIO Account information")
|
||||
@click.option("--offline", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def account_show(**kwargs):
|
||||
pioplus_call(sys.argv[1:])
|
@ -13,14 +13,16 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import shutil
|
||||
|
||||
import click
|
||||
from tabulate import tabulate
|
||||
|
||||
from platformio import util
|
||||
from platformio.managers.platform import PlatformManager
|
||||
from platformio import fs
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
|
||||
|
||||
@click.command("boards", short_help="Embedded Board Explorer")
|
||||
@click.command("boards", short_help="Board Explorer")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--installed", is_flag=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@ -30,49 +32,46 @@ def cli(query, installed, json_output): # pylint: disable=R0912
|
||||
|
||||
grpboards = {}
|
||||
for board in _get_boards(installed):
|
||||
if query and query.lower() not in json.dumps(board).lower():
|
||||
if query and not any(
|
||||
query.lower() in str(board.get(k, "")).lower()
|
||||
for k in ("id", "name", "mcu", "vendor", "platform", "frameworks")
|
||||
):
|
||||
continue
|
||||
if board['platform'] not in grpboards:
|
||||
grpboards[board['platform']] = []
|
||||
grpboards[board['platform']].append(board)
|
||||
if board["platform"] not in grpboards:
|
||||
grpboards[board["platform"]] = []
|
||||
grpboards[board["platform"]].append(board)
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
for (platform, boards) in sorted(grpboards.items()):
|
||||
terminal_width = shutil.get_terminal_size().columns
|
||||
for platform, boards in sorted(grpboards.items()):
|
||||
click.echo("")
|
||||
click.echo("Platform: ", nl=False)
|
||||
click.secho(platform, bold=True)
|
||||
click.echo("-" * terminal_width)
|
||||
click.echo("=" * terminal_width)
|
||||
print_boards(boards)
|
||||
return True
|
||||
|
||||
|
||||
def print_boards(boards):
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
BOARDLIST_TPL = ("{type:<30} {mcu:<14} {frequency:<8} "
|
||||
" {flash:<7} {ram:<6} {name}")
|
||||
click.echo(
|
||||
BOARDLIST_TPL.format(
|
||||
type=click.style("ID", fg="cyan"),
|
||||
mcu="MCU",
|
||||
frequency="Frequency",
|
||||
flash="Flash",
|
||||
ram="RAM",
|
||||
name="Name"))
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
for board in boards:
|
||||
click.echo(
|
||||
BOARDLIST_TPL.format(
|
||||
type=click.style(board['id'], fg="cyan"),
|
||||
mcu=board['mcu'],
|
||||
frequency="%dMHz" % (board['fcpu'] / 1000000),
|
||||
flash=util.format_filesize(board['rom']),
|
||||
ram=util.format_filesize(board['ram']),
|
||||
name=board['name']))
|
||||
tabulate(
|
||||
[
|
||||
(
|
||||
click.style(b["id"], fg="cyan"),
|
||||
b["mcu"],
|
||||
"%dMHz" % (b["fcpu"] / 1000000),
|
||||
fs.humanize_file_size(b["rom"]),
|
||||
fs.humanize_file_size(b["ram"]),
|
||||
b["name"],
|
||||
)
|
||||
for b in boards
|
||||
],
|
||||
headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"],
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _get_boards(installed=False):
|
||||
pm = PlatformManager()
|
||||
pm = PlatformPackageManager()
|
||||
return pm.get_installed_boards() if installed else pm.get_all_boards()
|
||||
|
||||
|
||||
@ -80,7 +79,7 @@ def _print_boards_json(query, installed=False):
|
||||
result = []
|
||||
for board in _get_boards(installed):
|
||||
if query:
|
||||
search_data = "%s %s" % (board['id'], json.dumps(board).lower())
|
||||
search_data = "%s %s" % (board["id"], json.dumps(board).lower())
|
||||
if query.lower() not in search_data.lower():
|
||||
continue
|
||||
result.append(board)
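The reworked print_boards() above hands column layout to tabulate; a standalone sketch of the same call with a made-up board entry (file-size humanizing omitted to keep it self-contained):

import click
from tabulate import tabulate

boards = [dict(id="uno", mcu="ATmega328P", fcpu=16000000,
               rom=32256, ram=2048, name="Arduino Uno")]
click.echo(
    tabulate(
        [
            (
                click.style(b["id"], fg="cyan"),
                b["mcu"],
                "%dMHz" % (b["fcpu"] / 1000000),
                b["rom"],
                b["ram"],
                b["name"],
            )
            for b in boards
        ],
        headers=["ID", "MCU", "Frequency", "Flash", "RAM", "Name"],
    )
)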
|
||||
|
@ -12,19 +12,18 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from glob import glob
|
||||
from os import getenv, makedirs, remove
|
||||
from os.path import abspath, basename, expanduser, isdir, isfile, join
|
||||
from shutil import copyfile, copytree
|
||||
from tempfile import mkdtemp
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, util
|
||||
from platformio.commands.init import cli as cmd_init
|
||||
from platformio.commands.init import validate_boards
|
||||
from platformio.commands.run import cli as cmd_run
|
||||
from platformio import fs
|
||||
from platformio.exception import CIBuildEnvsEmpty
|
||||
from platformio.project.commands.init import project_init_cmd, validate_boards
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.run.cli import cli as cmd_run
|
||||
|
||||
|
||||
def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
@ -32,72 +31,71 @@ def validate_path(ctx, param, value): # pylint: disable=unused-argument
|
||||
value = list(value)
|
||||
for i, p in enumerate(value):
|
||||
if p.startswith("~"):
|
||||
value[i] = expanduser(p)
|
||||
value[i] = abspath(value[i])
|
||||
if not glob(value[i]):
|
||||
value[i] = fs.expanduser(p)
|
||||
value[i] = os.path.abspath(value[i])
|
||||
if not glob.glob(value[i], recursive=True):
|
||||
invalid_path = p
|
||||
break
|
||||
try:
|
||||
assert invalid_path is None
|
||||
return value
|
||||
except AssertionError:
|
||||
raise click.BadParameter("Found invalid path: %s" % invalid_path)
|
||||
except AssertionError as exc:
|
||||
raise click.BadParameter("Found invalid path: %s" % invalid_path) from exc
|
||||
|
||||
|
||||
@click.command("ci", short_help="Continuous Integration")
|
||||
@click.argument("src", nargs=-1, callback=validate_path)
|
||||
@click.option(
|
||||
"-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
|
||||
@click.option("-l", "--lib", multiple=True, callback=validate_path, metavar="DIRECTORY")
|
||||
@click.option("--exclude", multiple=True)
|
||||
@click.option(
|
||||
"-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
|
||||
@click.option("-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
|
||||
@click.option(
|
||||
"--build-dir",
|
||||
default=mkdtemp,
|
||||
type=click.Path(
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
default=tempfile.mkdtemp,
|
||||
type=click.Path(file_okay=False, dir_okay=True, writable=True),
|
||||
)
|
||||
@click.option("--keep-build-dir", is_flag=True)
|
||||
@click.option(
|
||||
"-C",
|
||||
"-c",
|
||||
"--project-conf",
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=True,
|
||||
dir_okay=False,
|
||||
readable=True,
|
||||
resolve_path=True))
|
||||
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
|
||||
)
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("-e", "--environment", "environments", multiple=True)
|
||||
@click.option("-v", "--verbose", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
ctx, src, lib, exclude, board, build_dir, keep_build_dir, project_conf,
|
||||
project_option, verbose):
|
||||
|
||||
if not src and getenv("PLATFORMIO_CI_SRC"):
|
||||
src = validate_path(ctx, None, getenv("PLATFORMIO_CI_SRC").split(":"))
|
||||
def cli( # pylint: disable=too-many-arguments,too-many-positional-arguments, too-many-branches
|
||||
ctx,
|
||||
src,
|
||||
lib,
|
||||
exclude,
|
||||
board,
|
||||
build_dir,
|
||||
keep_build_dir,
|
||||
project_conf,
|
||||
project_option,
|
||||
environments,
|
||||
verbose,
|
||||
):
|
||||
if not src and os.getenv("PLATFORMIO_CI_SRC"):
|
||||
src = validate_path(ctx, None, os.getenv("PLATFORMIO_CI_SRC").split(":"))
|
||||
if not src:
|
||||
raise click.BadParameter("Missing argument 'src'")
|
||||
|
||||
try:
|
||||
app.set_session_var("force_option", True)
|
||||
|
||||
if not keep_build_dir and isdir(build_dir):
|
||||
util.rmtree_(build_dir)
|
||||
if not isdir(build_dir):
|
||||
makedirs(build_dir)
|
||||
if not keep_build_dir and os.path.isdir(build_dir):
|
||||
fs.rmtree(build_dir)
|
||||
if not os.path.isdir(build_dir):
|
||||
os.makedirs(build_dir)
|
||||
|
||||
for dir_name, patterns in dict(lib=lib, src=src).items():
|
||||
if not patterns:
|
||||
continue
|
||||
contents = []
|
||||
for p in patterns:
|
||||
contents += glob(p)
|
||||
_copy_contents(join(build_dir, dir_name), contents)
|
||||
contents += glob.glob(p, recursive=True)
|
||||
_copy_contents(os.path.join(build_dir, dir_name), contents)
|
||||
|
||||
if project_conf and isfile(project_conf):
|
||||
if project_conf and os.path.isfile(project_conf):
|
||||
_copy_project_conf(build_dir, project_conf)
|
||||
elif not board:
|
||||
raise CIBuildEnvsEmpty()
|
||||
@ -107,65 +105,70 @@ def cli( # pylint: disable=too-many-arguments, too-many-branches
|
||||
|
||||
# initialise project
|
||||
ctx.invoke(
|
||||
cmd_init,
|
||||
project_init_cmd,
|
||||
project_dir=build_dir,
|
||||
board=board,
|
||||
project_option=project_option)
|
||||
boards=board,
|
||||
project_options=project_option,
|
||||
)
|
||||
|
||||
# process project
|
||||
ctx.invoke(cmd_run, project_dir=build_dir, verbose=verbose)
|
||||
ctx.invoke(
|
||||
cmd_run, project_dir=build_dir, environment=environments, verbose=verbose
|
||||
)
|
||||
finally:
|
||||
if not keep_build_dir:
|
||||
util.rmtree_(build_dir)
|
||||
fs.rmtree(build_dir)
|
||||
|
||||
|
||||
def _copy_contents(dst_dir, contents):
|
||||
def _copy_contents(dst_dir, contents): # pylint: disable=too-many-branches
|
||||
items = {"dirs": set(), "files": set()}
|
||||
|
||||
for path in contents:
|
||||
if isdir(path):
|
||||
items['dirs'].add(path)
|
||||
elif isfile(path):
|
||||
items['files'].add(path)
|
||||
if os.path.isdir(path):
|
||||
items["dirs"].add(path)
|
||||
elif os.path.isfile(path):
|
||||
items["files"].add(path)
|
||||
|
||||
dst_dir_name = basename(dst_dir)
|
||||
dst_dir_name = os.path.basename(dst_dir)
|
||||
|
||||
if dst_dir_name == "src" and len(items['dirs']) == 1:
|
||||
copytree(list(items['dirs']).pop(), dst_dir, symlinks=True)
|
||||
if dst_dir_name == "src" and len(items["dirs"]) == 1:
|
||||
if not os.path.isdir(dst_dir):
|
||||
shutil.copytree(list(items["dirs"]).pop(), dst_dir, symlinks=True)
|
||||
else:
|
||||
if not isdir(dst_dir):
|
||||
makedirs(dst_dir)
|
||||
for d in items['dirs']:
|
||||
copytree(d, join(dst_dir, basename(d)), symlinks=True)
|
||||
if not os.path.isdir(dst_dir):
|
||||
os.makedirs(dst_dir)
|
||||
for d in items["dirs"]:
|
||||
src_dst_dir = os.path.join(dst_dir, os.path.basename(d))
|
||||
if not os.path.isdir(src_dst_dir):
|
||||
shutil.copytree(d, src_dst_dir, symlinks=True)
|
||||
|
||||
if not items['files']:
|
||||
if not items["files"]:
|
||||
return
|
||||
|
||||
if dst_dir_name == "lib":
|
||||
dst_dir = join(dst_dir, mkdtemp(dir=dst_dir))
|
||||
dst_dir = os.path.join(dst_dir, tempfile.mkdtemp(dir=dst_dir))
|
||||
|
||||
for f in items['files']:
|
||||
dst_file = join(dst_dir, basename(f))
|
||||
for f in items["files"]:
|
||||
dst_file = os.path.join(dst_dir, os.path.basename(f))
|
||||
if f == dst_file:
|
||||
continue
|
||||
copyfile(f, dst_file)
|
||||
shutil.copyfile(f, dst_file)
|
||||
|
||||
|
||||
def _exclude_contents(dst_dir, patterns):
|
||||
contents = []
|
||||
for p in patterns:
|
||||
contents += glob(join(util.glob_escape(dst_dir), p))
|
||||
contents += glob.glob(os.path.join(glob.escape(dst_dir), p), recursive=True)
|
||||
for path in contents:
|
||||
path = abspath(path)
|
||||
if isdir(path):
|
||||
util.rmtree_(path)
|
||||
elif isfile(path):
|
||||
remove(path)
|
||||
path = os.path.abspath(path)
|
||||
if os.path.isdir(path):
|
||||
fs.rmtree(path)
|
||||
elif os.path.isfile(path):
|
||||
os.remove(path)
|
||||
|
||||
|
||||
def _copy_project_conf(build_dir, project_conf):
|
||||
config = util.load_project_config(project_conf)
|
||||
config = ProjectConfig(project_conf, parse_extra=False)
|
||||
if config.has_section("platformio"):
|
||||
config.remove_section("platformio")
|
||||
with open(join(build_dir, "platformio.ini"), "w") as fp:
|
||||
config.write(fp)
|
||||
config.save(os.path.join(build_dir, "platformio.ini"))
|
||||
|
@ -1,42 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
from os import getcwd
|
||||
|
||||
import click
|
||||
|
||||
from platformio.managers.core import pioplus_call
|
||||
|
||||
|
||||
@click.command(
|
||||
"debug",
|
||||
context_settings=dict(ignore_unknown_options=True),
|
||||
short_help="PIO Unified Debugger")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option("--environment", "-e", metavar="<environment>")
|
||||
@click.option("--verbose", "-v", is_flag=True)
|
||||
@click.option("--interface", type=click.Choice(["gdb"]))
|
||||
@click.argument("__unprocessed", nargs=-1, type=click.UNPROCESSED)
|
||||
def cli(*args, **kwargs): # pylint: disable=unused-argument
|
||||
pioplus_call(sys.argv[1:])
|
@ -1,228 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import sys
|
||||
from os import getcwd
|
||||
|
||||
import click
|
||||
from serial.tools import miniterm
|
||||
|
||||
from platformio import exception, util
|
||||
|
||||
|
||||
@click.group(short_help="Monitor device or list existing")
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
@cli.command("list", short_help="List devices")
|
||||
@click.option("--serial", is_flag=True, help="List serial ports, default")
|
||||
@click.option("--logical", is_flag=True, help="List logical devices")
|
||||
@click.option("--mdns", is_flag=True, help="List multicast DNS services")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def device_list( # pylint: disable=too-many-branches
|
||||
serial, logical, mdns, json_output):
|
||||
if not logical and not mdns:
|
||||
serial = True
|
||||
data = {}
|
||||
if serial:
|
||||
data['serial'] = util.get_serial_ports()
|
||||
if logical:
|
||||
data['logical'] = util.get_logical_devices()
|
||||
if mdns:
|
||||
data['mdns'] = util.get_mdns_services()
|
||||
|
||||
single_key = data.keys()[0] if len(data.keys()) == 1 else None
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(data[single_key] if single_key else data))
|
||||
|
||||
titles = {
|
||||
"serial": "Serial Ports",
|
||||
"logical": "Logical Devices",
|
||||
"mdns": "Multicast DNS Services"
|
||||
}
|
||||
|
||||
for key, value in data.items():
|
||||
if not single_key:
|
||||
click.secho(titles[key], bold=True)
|
||||
click.echo("=" * len(titles[key]))
|
||||
|
||||
if key == "serial":
|
||||
for item in value:
|
||||
click.secho(item['port'], fg="cyan")
|
||||
click.echo("-" * len(item['port']))
|
||||
click.echo("Hardware ID: %s" % item['hwid'])
|
||||
click.echo("Description: %s" % item['description'])
|
||||
click.echo("")
|
||||
|
||||
if key == "logical":
|
||||
for item in value:
|
||||
click.secho(item['path'], fg="cyan")
|
||||
click.echo("-" * len(item['path']))
|
||||
click.echo("Name: %s" % item['name'])
|
||||
click.echo("")
|
||||
|
||||
if key == "mdns":
|
||||
for item in value:
|
||||
click.secho(item['name'], fg="cyan")
|
||||
click.echo("-" * len(item['name']))
|
||||
click.echo("Type: %s" % item['type'])
|
||||
click.echo("IP: %s" % item['ip'])
|
||||
click.echo("Port: %s" % item['port'])
|
||||
if item['properties']:
|
||||
click.echo("Properties: %s" % ("; ".join([
|
||||
"%s=%s" % (k, v)
|
||||
for k, v in item['properties'].items()
|
||||
])))
|
||||
click.echo("")
|
||||
|
||||
if single_key:
|
||||
click.echo("")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("monitor", short_help="Monitor device (Serial)")
|
||||
@click.option("--port", "-p", help="Port, a number or a device name")
|
||||
@click.option("--baud", "-b", type=int, help="Set baud rate, default=9600")
|
||||
@click.option(
|
||||
"--parity",
|
||||
default="N",
|
||||
type=click.Choice(["N", "E", "O", "S", "M"]),
|
||||
help="Set parity, default=N")
|
||||
@click.option(
|
||||
"--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
|
||||
@click.option(
|
||||
"--xonxoff",
|
||||
is_flag=True,
|
||||
help="Enable software flow control, default=Off")
|
||||
@click.option(
|
||||
"--rts",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial RTS line state")
|
||||
@click.option(
|
||||
"--dtr",
|
||||
default=None,
|
||||
type=click.IntRange(0, 1),
|
||||
help="Set initial DTR line state")
|
||||
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
|
||||
@click.option(
|
||||
"--encoding",
|
||||
default="UTF-8",
|
||||
help="Set the encoding for the serial port (e.g. hexlify, "
|
||||
"Latin1, UTF-8), default: UTF-8")
|
||||
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
|
||||
@click.option(
|
||||
"--eol",
|
||||
default="CRLF",
|
||||
type=click.Choice(["CR", "LF", "CRLF"]),
|
||||
help="End of line mode, default=CRLF")
|
||||
@click.option(
|
||||
"--raw", is_flag=True, help="Do not apply any encodings/transformations")
|
||||
@click.option(
|
||||
"--exit-char",
|
||||
type=int,
|
||||
default=3,
|
||||
help="ASCII code of special character that is used to exit "
|
||||
"the application, default=3 (Ctrl+C)")
|
||||
@click.option(
|
||||
"--menu-char",
|
||||
type=int,
|
||||
default=20,
|
||||
help="ASCII code of special character that is used to "
|
||||
"control miniterm (menu), default=20 (DEC)")
|
||||
@click.option(
|
||||
"--quiet",
|
||||
is_flag=True,
|
||||
help="Diagnostics: suppress non-error messages, default=Off")
|
||||
@click.option(
|
||||
"-d",
|
||||
"--project-dir",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True, file_okay=False, dir_okay=True, resolve_path=True))
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
help="Load configuration from `platformio.ini` and specified environment")
|
||||
def device_monitor(**kwargs): # pylint: disable=too-many-branches
|
||||
try:
|
||||
project_options = get_project_options(kwargs['project_dir'],
|
||||
kwargs['environment'])
|
||||
monitor_options = {k: v for k, v in project_options or []}
|
||||
if monitor_options:
|
||||
for k in ("port", "baud", "speed", "rts", "dtr"):
|
||||
k2 = "monitor_%s" % k
|
||||
if k == "speed":
|
||||
k = "baud"
|
||||
if kwargs[k] is None and k2 in monitor_options:
|
||||
kwargs[k] = monitor_options[k2]
|
||||
if k != "port":
|
||||
kwargs[k] = int(kwargs[k])
|
||||
except exception.NotPlatformIOProject:
|
||||
pass
|
||||
|
||||
if not kwargs['port']:
|
||||
ports = util.get_serial_ports(filter_hwid=True)
|
||||
if len(ports) == 1:
|
||||
kwargs['port'] = ports[0]['port']
|
||||
|
||||
sys.argv = ["monitor"]
|
||||
for k, v in kwargs.items():
|
||||
if k in ("port", "baud", "rts", "dtr", "environment", "project_dir"):
|
||||
continue
|
||||
k = "--" + k.replace("_", "-")
|
||||
if isinstance(v, bool):
|
||||
if v:
|
||||
sys.argv.append(k)
|
||||
elif isinstance(v, tuple):
|
||||
for i in v:
|
||||
sys.argv.extend([k, i])
|
||||
else:
|
||||
sys.argv.extend([k, str(v)])
|
||||
|
||||
try:
|
||||
miniterm.main(
|
||||
default_port=kwargs['port'],
|
||||
default_baudrate=kwargs['baud'] or 9600,
|
||||
default_rts=kwargs['rts'],
|
||||
default_dtr=kwargs['dtr'])
|
||||
except Exception as e:
|
||||
raise exception.MinitermException(e)


def get_project_options(project_dir, environment):
    config = util.load_project_config(project_dir)
    if not config.sections():
        return None

    known_envs = [s[4:] for s in config.sections() if s.startswith("env:")]
    if environment:
        if environment in known_envs:
            return config.items("env:%s" % environment)
        raise exception.UnknownEnvNames(environment, ", ".join(known_envs))

    if not known_envs:
        return None

    if config.has_option("platformio", "env_default"):
        env_default = config.get("platformio",
                                 "env_default").split(", ")[0].strip()
        if env_default and env_default in known_envs:
            return config.items("env:%s" % env_default)

    return config.items("env:%s" % known_envs[0])
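For illustration, a self-contained sketch of how `device_monitor()` above is expected to fold the `monitor_*` options returned by `get_project_options()` into its keyword arguments; the INI content and environment name are invented:

```
import configparser

config = configparser.ConfigParser()
config.read_string("""
[env:uno]
platform = atmelavr
board = uno
monitor_port = /dev/ttyUSB0
monitor_speed = 115200
""")
monitor_options = dict(config.items("env:uno"))

kwargs = {"port": None, "baud": None, "rts": None, "dtr": None}
for k in ("port", "baud", "speed", "rts", "dtr"):
    k2 = "monitor_%s" % k
    if k == "speed":  # `monitor_speed` feeds the --baud option
        k = "baud"
    if kwargs[k] is None and k2 in monitor_options:
        kwargs[k] = monitor_options[k2]
        if k != "port":
            kwargs[k] = int(kwargs[k])

print(kwargs)  # {'port': '/dev/ttyUSB0', 'baud': 115200, 'rts': None, 'dtr': None}
```
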
@@ -12,10 +12,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
[report]
|
||||
# Regexes for lines to exclude from consideration
|
||||
exclude_lines =
|
||||
pragma: no cover
|
||||
def __repr__
|
||||
raise AssertionError
|
||||
raise NotImplementedError
|
||||
# pylint: disable=unused-import
|
||||
from platformio.device.monitor.filters.base import (
|
||||
DeviceMonitorFilterBase as DeviceMonitorFilter,
|
||||
)
|
@@ -1,423 +0,0 @@
|
||||
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# pylint: disable=too-many-arguments,too-many-locals, too-many-branches
|
||||
|
||||
from os import getcwd, makedirs
|
||||
from os.path import isdir, isfile, join
|
||||
from shutil import copyfile
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.commands.platform import \
|
||||
platform_install as cli_platform_install
|
||||
from platformio.ide.projectgenerator import ProjectGenerator
|
||||
from platformio.managers.platform import PlatformManager
|
||||
|
||||
|
||||
def validate_boards(ctx, param, value): # pylint: disable=W0613
|
||||
pm = PlatformManager()
|
||||
for id_ in value:
|
||||
try:
|
||||
pm.board_config(id_)
|
||||
except exception.UnknownBoard:
|
||||
raise click.BadParameter(
|
||||
"`%s`. Please search for board ID using `platformio boards` "
|
||||
"command" % id_)
|
||||
return value
|
||||
|
||||
|
||||
@click.command(
|
||||
"init", short_help="Initialize PlatformIO project or update existing")
|
||||
@click.option(
|
||||
"--project-dir",
|
||||
"-d",
|
||||
default=getcwd,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True))
|
||||
@click.option(
|
||||
"-b", "--board", multiple=True, metavar="ID", callback=validate_boards)
|
||||
@click.option(
|
||||
"--ide", type=click.Choice(ProjectGenerator.get_supported_ides()))
|
||||
@click.option("-O", "--project-option", multiple=True)
|
||||
@click.option("--env-prefix", default="")
|
||||
@click.option("-s", "--silent", is_flag=True)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
ctx, # pylint: disable=R0913
|
||||
project_dir,
|
||||
board,
|
||||
ide,
|
||||
project_option,
|
||||
env_prefix,
|
||||
silent):
|
||||
|
||||
if not silent:
|
||||
if project_dir == getcwd():
|
||||
click.secho(
|
||||
"\nThe current working directory", fg="yellow", nl=False)
|
||||
click.secho(" %s " % project_dir, fg="cyan", nl=False)
|
||||
click.secho("will be used for the project.", fg="yellow")
|
||||
click.echo("")
|
||||
|
||||
click.echo("The following files/directories have been created in %s" %
|
||||
click.style(project_dir, fg="cyan"))
|
||||
click.echo("%s - Put project header files here" % click.style(
|
||||
"include", fg="cyan"))
|
||||
click.echo("%s - Put here project specific (private) libraries" %
|
||||
click.style("lib", fg="cyan"))
|
||||
click.echo("%s - Put project source files here" % click.style(
|
||||
"src", fg="cyan"))
|
||||
click.echo("%s - Project Configuration File" % click.style(
|
||||
"platformio.ini", fg="cyan"))
|
||||
|
||||
is_new_project = not util.is_platformio_project(project_dir)
|
||||
init_base_project(project_dir)
|
||||
|
||||
if board:
|
||||
fill_project_envs(ctx, project_dir, board, project_option, env_prefix,
|
||||
ide is not None)
|
||||
|
||||
if ide:
|
||||
env_name = get_best_envname(project_dir, board)
|
||||
if not env_name:
|
||||
raise exception.BoardNotDefined()
|
||||
pg = ProjectGenerator(project_dir, ide, env_name)
|
||||
pg.generate()
|
||||
|
||||
if is_new_project:
|
||||
init_ci_conf(project_dir)
|
||||
init_cvs_ignore(project_dir)
|
||||
|
||||
if silent:
|
||||
return
|
||||
|
||||
if ide:
|
||||
click.secho(
|
||||
"\nProject has been successfully %s including configuration files "
|
||||
"for `%s` IDE." % ("initialized" if is_new_project else "updated",
|
||||
ide),
|
||||
fg="green")
|
||||
else:
|
||||
click.secho(
|
||||
"\nProject has been successfully %s! Useful commands:\n"
|
||||
"`pio run` - process/build project from the current directory\n"
|
||||
"`pio run --target upload` or `pio run -t upload` "
|
||||
"- upload firmware to a target\n"
|
||||
"`pio run --target clean` - clean project (remove compiled files)"
|
||||
"\n`pio run --help` - additional information" %
|
||||
("initialized" if is_new_project else "updated"),
|
||||
fg="green")


def get_best_envname(project_dir, boards=None):
    config = util.load_project_config(project_dir)
    env_default = None
    if config.has_option("platformio", "env_default"):
        env_default = config.get("platformio",
                                 "env_default").split(", ")[0].strip()
    if env_default:
        return env_default
    section = None
    for section in config.sections():
        if not section.startswith("env:"):
            continue
        elif config.has_option(section, "board") and (not boards or config.get(
                section, "board") in boards):
            break
    return section[4:] if section else None
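A tiny, invented example of the `env_default` branch above: with the configuration below, the first entry of `env_default` wins over the section order:

```
import configparser

config = configparser.ConfigParser()
config.read_string("""
[platformio]
env_default = nodemcu, uno

[env:uno]
board = uno

[env:nodemcu]
board = nodemcuv2
""")

env_default = config.get("platformio", "env_default").split(", ")[0].strip()
print(env_default)  # -> nodemcu
```
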
def init_base_project(project_dir):
|
||||
if util.is_platformio_project(project_dir):
|
||||
return
|
||||
|
||||
copyfile(
|
||||
join(util.get_source_dir(), "projectconftpl.ini"),
|
||||
join(project_dir, "platformio.ini"))
|
||||
|
||||
with util.cd(project_dir):
|
||||
dir_to_readme = [
|
||||
(util.get_projectsrc_dir(), None),
|
||||
(util.get_projectinclude_dir(), init_include_readme),
|
||||
(util.get_projectlib_dir(), init_lib_readme),
|
||||
(util.get_projecttest_dir(), init_test_readme),
|
||||
]
|
||||
for (path, cb) in dir_to_readme:
|
||||
if isdir(path):
|
||||
continue
|
||||
makedirs(path)
|
||||
if cb:
|
||||
cb(path)
|
||||
|
||||
|
||||
def init_include_readme(include_dir):
|
||||
with open(join(include_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for project header files.
|
||||
|
||||
A header file is a file containing C declarations and macro definitions
|
||||
to be shared between several project source files. You request the use of a
|
||||
header file in your project source file (C, C++, etc) located in `src` folder
|
||||
by including it, with the C preprocessing directive `#include'.
|
||||
|
||||
```src/main.c
|
||||
|
||||
#include "header.h"
|
||||
|
||||
int main (void)
|
||||
{
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
Including a header file produces the same results as copying the header file
|
||||
into each source file that needs it. Such copying would be time-consuming
|
||||
and error-prone. With a header file, the related declarations appear
|
||||
in only one place. If they need to be changed, they can be changed in one
|
||||
place, and programs that include the header file will automatically use the
|
||||
new version when next recompiled. The header file eliminates the labor of
|
||||
finding and changing all the copies as well as the risk that a failure to
|
||||
find one copy will result in inconsistencies within a program.
|
||||
|
||||
In C, the usual convention is to give header files names that end with `.h'.
|
||||
It is most portable to use only letters, digits, dashes, and underscores in
|
||||
header file names, and at most one dot.
|
||||
|
||||
Read more about using header files in official GCC documentation:
|
||||
|
||||
* Include Syntax
|
||||
* Include Operation
|
||||
* Once-Only Headers
|
||||
* Computed Includes
|
||||
|
||||
https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
|
||||
""")
|
||||
|
||||
|
||||
def init_lib_readme(lib_dir):
|
||||
with open(join(lib_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for project specific (private) libraries.
|
||||
PlatformIO will compile them to static libraries and link them into the executable file.
|
||||
|
||||
The source code of each library should be placed in its own separate directory
("lib/your_library_name/[here are source files]").
|
||||
|
||||
For example, see the structure of the following two libraries, `Foo` and `Bar`:
|
||||
|
||||
|--lib
|
||||
| |
|
||||
| |--Bar
|
||||
| | |--docs
|
||||
| | |--examples
|
||||
| | |--src
|
||||
| | |- Bar.c
|
||||
| | |- Bar.h
|
||||
| | |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|
||||
| |
|
||||
| |--Foo
|
||||
| | |- Foo.c
|
||||
| | |- Foo.h
|
||||
| |
|
||||
| |- README --> THIS FILE
|
||||
|
|
||||
|- platformio.ini
|
||||
|--src
|
||||
|- main.c
|
||||
|
||||
and the contents of `src/main.c`:
|
||||
```
|
||||
#include <Foo.h>
|
||||
#include <Bar.h>
|
||||
|
||||
int main (void)
|
||||
{
|
||||
...
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
PlatformIO Library Dependency Finder will automatically find dependent
libraries by scanning project source files.
|
||||
|
||||
More information about PlatformIO Library Dependency Finder
|
||||
- https://docs.platformio.org/page/librarymanager/ldf.html
|
||||
""")
|
||||
|
||||
|
||||
def init_test_readme(test_dir):
|
||||
with open(join(test_dir, "README"), "w") as f:
|
||||
f.write("""
|
||||
This directory is intended for PIO Unit Testing and project tests.
|
||||
|
||||
Unit Testing is a software testing method by which individual units of
|
||||
source code, sets of one or more MCU program modules together with associated
|
||||
control data, usage procedures, and operating procedures, are tested to
|
||||
determine whether they are fit for use. Unit testing finds problems early
|
||||
in the development cycle.
|
||||
|
||||
More information about PIO Unit Testing:
|
||||
- https://docs.platformio.org/page/plus/unit-testing.html
|
||||
""")
|
||||
|
||||
|
||||
def init_ci_conf(project_dir):
|
||||
conf_path = join(project_dir, ".travis.yml")
|
||||
if isfile(conf_path):
|
||||
return
|
||||
with open(conf_path, "w") as f:
|
||||
f.write("""# Continuous Integration (CI) is the practice, in software
|
||||
# engineering, of merging all developer working copies with a shared mainline
|
||||
# several times a day < https://docs.platformio.org/page/ci/index.html >
|
||||
#
|
||||
# Documentation:
|
||||
#
|
||||
# * Travis CI Embedded Builds with PlatformIO
|
||||
# < https://docs.travis-ci.com/user/integration/platformio/ >
|
||||
#
|
||||
# * PlatformIO integration with Travis CI
|
||||
# < https://docs.platformio.org/page/ci/travis.html >
|
||||
#
|
||||
# * User Guide for `platformio ci` command
|
||||
# < https://docs.platformio.org/page/userguide/cmd_ci.html >
|
||||
#
|
||||
#
|
||||
# Please choose one of the following templates (proposed below) and uncomment
|
||||
# it (remove "# " before each line) or use your own configuration according to the
|
||||
# Travis CI documentation (see above).
|
||||
#
|
||||
|
||||
|
||||
#
|
||||
# Template #1: General project. Test it using existing `platformio.ini`.
|
||||
#
|
||||
|
||||
# language: python
|
||||
# python:
|
||||
# - "2.7"
|
||||
#
|
||||
# sudo: false
|
||||
# cache:
|
||||
# directories:
|
||||
# - "~/.platformio"
|
||||
#
|
||||
# install:
|
||||
# - pip install -U platformio
|
||||
# - platformio update
|
||||
#
|
||||
# script:
|
||||
# - platformio run
|
||||
|
||||
|
||||
#
|
||||
# Template #2: The project is intended to be used as a library with examples.
|
||||
#
|
||||
|
||||
# language: python
|
||||
# python:
|
||||
# - "2.7"
|
||||
#
|
||||
# sudo: false
|
||||
# cache:
|
||||
# directories:
|
||||
# - "~/.platformio"
|
||||
#
|
||||
# env:
|
||||
# - PLATFORMIO_CI_SRC=path/to/test/file.c
|
||||
# - PLATFORMIO_CI_SRC=examples/file.ino
|
||||
# - PLATFORMIO_CI_SRC=path/to/test/directory
|
||||
#
|
||||
# install:
|
||||
# - pip install -U platformio
|
||||
# - platformio update
|
||||
#
|
||||
# script:
|
||||
# - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N
|
||||
""")
|
||||
|
||||
|
||||
def init_cvs_ignore(project_dir):
|
||||
conf_path = join(project_dir, ".gitignore")
|
||||
if isfile(conf_path):
|
||||
return
|
||||
with open(conf_path, "w") as fp:
|
||||
fp.writelines([".pio\n", ".pioenvs\n", ".piolibdeps\n"])
|
||||
|
||||
|
||||
def fill_project_envs(ctx, project_dir, board_ids, project_option, env_prefix,
|
||||
force_download):
|
||||
content = []
|
||||
used_boards = []
|
||||
used_platforms = []
|
||||
|
||||
config = util.load_project_config(project_dir)
|
||||
for section in config.sections():
|
||||
cond = [
|
||||
section.startswith("env:"),
|
||||
config.has_option(section, "board")
|
||||
]
|
||||
if all(cond):
|
||||
used_boards.append(config.get(section, "board"))
|
||||
|
||||
pm = PlatformManager()
|
||||
for id_ in board_ids:
|
||||
board_config = pm.board_config(id_)
|
||||
used_platforms.append(board_config['platform'])
|
||||
if id_ in used_boards:
|
||||
continue
|
||||
used_boards.append(id_)
|
||||
|
||||
envopts = {"platform": board_config['platform'], "board": id_}
|
||||
# find default framework for board
|
||||
frameworks = board_config.get("frameworks")
|
||||
if frameworks:
|
||||
envopts['framework'] = frameworks[0]
|
||||
|
||||
for item in project_option:
|
||||
if "=" not in item:
|
||||
continue
|
||||
_name, _value = item.split("=", 1)
|
||||
envopts[_name.strip()] = _value.strip()
|
||||
|
||||
content.append("")
|
||||
content.append("[env:%s%s]" % (env_prefix, id_))
|
||||
for name, value in envopts.items():
|
||||
content.append("%s = %s" % (name, value))
|
||||
|
||||
if force_download and used_platforms:
|
||||
_install_dependent_platforms(ctx, used_platforms)
|
||||
|
||||
if not content:
|
||||
return
|
||||
|
||||
with open(join(project_dir, "platformio.ini"), "a") as f:
|
||||
content.append("")
|
||||
f.write("\n".join(content))


def _install_dependent_platforms(ctx, platforms):
    installed_platforms = [
        p['name'] for p in PlatformManager().get_installed()
    ]
    if set(platforms) <= set(installed_platforms):
        return
    ctx.invoke(
        cli_platform_install,
        platforms=list(set(platforms) - set(installed_platforms)))
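As a rough sketch of what `fill_project_envs()` above appends to `platformio.ini`, assuming a hypothetical `platformio init -b uno -O "upload_speed=115200"` run where the board's platform is `atmelavr` and its default framework is `arduino`:

```
env_prefix = ""
id_ = "uno"
envopts = {"platform": "atmelavr", "board": id_, "framework": "arduino"}
for item in ("upload_speed=115200",):
    _name, _value = item.split("=", 1)
    envopts[_name.strip()] = _value.strip()

content = ["", "[env:%s%s]" % (env_prefix, id_)]
for name, value in envopts.items():
    content.append("%s = %s" % (name, value))
print("\n".join(content))
# (a blank separator line, then)
# [env:uno]
# platform = atmelavr
# board = uno
# framework = arduino
# upload_speed = 115200
```
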
@@ -15,100 +15,185 @@
|
||||
# pylint: disable=too-many-branches, too-many-locals
|
||||
|
||||
import json
|
||||
import time
|
||||
from os.path import isdir, join
|
||||
from urllib import quote
|
||||
import logging
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio import exception, util
|
||||
from platformio.managers.lib import LibraryManager, get_builtin_libs
|
||||
from platformio.util import get_api_result
|
||||
from platformio import exception, fs
|
||||
from platformio.cli import PlatformioCLI
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.commands.list import package_list_cmd
|
||||
from platformio.package.commands.search import package_search_cmd
|
||||
from platformio.package.commands.show import package_show_cmd
|
||||
from platformio.package.commands.uninstall import package_uninstall_cmd
|
||||
from platformio.package.commands.update import package_update_cmd
|
||||
from platformio.package.exception import NotGlobalLibDir
|
||||
from platformio.package.manager.library import LibraryPackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.proc import is_ci
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.project.helpers import get_project_dir, is_platformio_project
|
||||
|
||||
CTX_META_INPUT_DIRS_KEY = __name__ + ".input_dirs"
|
||||
CTX_META_PROJECT_ENVIRONMENTS_KEY = __name__ + ".project_environments"
|
||||
CTX_META_STORAGE_DIRS_KEY = __name__ + ".storage_dirs"
|
||||
CTX_META_STORAGE_LIBDEPS_KEY = __name__ + ".storage_lib_deps"
|
||||
|
||||
|
||||
@click.group(short_help="Library Manager")
|
||||
@click.option(
|
||||
"-g",
|
||||
"--global",
|
||||
is_flag=True,
|
||||
help="Manage global PlatformIO library storage")
|
||||
def get_project_global_lib_dir():
|
||||
return ProjectConfig.get_instance().get("platformio", "globallib_dir")
|
||||
|
||||
|
||||
def invoke_command(ctx, cmd, **kwargs):
|
||||
input_dirs = ctx.meta.get(CTX_META_INPUT_DIRS_KEY, [])
|
||||
project_environments = ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY]
|
||||
for input_dir in input_dirs:
|
||||
cmd_kwargs = kwargs.copy()
|
||||
if is_platformio_project(input_dir):
|
||||
cmd_kwargs["project_dir"] = input_dir
|
||||
cmd_kwargs["environments"] = project_environments
|
||||
else:
|
||||
cmd_kwargs["global"] = True
|
||||
cmd_kwargs["storage_dir"] = input_dir
|
||||
ctx.invoke(cmd, **cmd_kwargs)
|
||||
|
||||
|
||||
@click.group(short_help="Library manager", hidden=True)
|
||||
@click.option(
|
||||
"-d",
|
||||
"--storage-dir",
|
||||
multiple=True,
|
||||
default=None,
|
||||
type=click.Path(
|
||||
exists=True,
|
||||
file_okay=False,
|
||||
dir_okay=True,
|
||||
writable=True,
|
||||
resolve_path=True),
|
||||
help="Manage custom library storage")
|
||||
type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True),
|
||||
help="Manage custom library storage",
|
||||
)
|
||||
@click.option(
|
||||
"-g", "--global", is_flag=True, help="Manage global PlatformIO library storage"
|
||||
)
|
||||
@click.option(
|
||||
"-e",
|
||||
"--environment",
|
||||
multiple=True,
|
||||
help=(
|
||||
"Manage libraries for the specific project build environments "
|
||||
"declared in `platformio.ini`"
|
||||
),
|
||||
)
|
||||
@click.pass_context
|
||||
def cli(ctx, **options):
|
||||
non_storage_cmds = ("search", "show", "register", "stats", "builtin")
|
||||
in_silence = PlatformioCLI.in_silence()
|
||||
storage_cmds = ("install", "uninstall", "update", "list")
|
||||
# skip commands that don't need storage folder
|
||||
if ctx.invoked_subcommand in non_storage_cmds or \
|
||||
(len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")):
|
||||
if ctx.invoked_subcommand not in storage_cmds or (
|
||||
len(ctx.args) == 2 and ctx.args[1] in ("-h", "--help")
|
||||
):
|
||||
return
|
||||
storage_dir = options['storage_dir']
|
||||
if not storage_dir:
|
||||
if options['global']:
|
||||
storage_dir = join(util.get_home_dir(), "lib")
|
||||
elif util.is_platformio_project():
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
elif util.is_ci():
|
||||
storage_dir = join(util.get_home_dir(), "lib")
|
||||
storage_dirs = list(options["storage_dir"])
|
||||
if options["global"]:
|
||||
storage_dirs.append(get_project_global_lib_dir())
|
||||
if not storage_dirs:
|
||||
if is_platformio_project():
|
||||
storage_dirs = [get_project_dir()]
|
||||
elif is_ci():
|
||||
storage_dirs = [get_project_global_lib_dir()]
|
||||
click.secho(
|
||||
"Warning! Global library storage is used automatically. "
|
||||
"Please use `platformio lib --global %s` command to remove "
|
||||
"this warning." % ctx.invoked_subcommand,
|
||||
fg="yellow")
|
||||
elif util.is_platformio_project(storage_dir):
|
||||
with util.cd(storage_dir):
|
||||
storage_dir = util.get_projectlibdeps_dir()
|
||||
fg="yellow",
|
||||
)
|
||||
|
||||
if not storage_dir and not util.is_platformio_project():
|
||||
raise exception.NotGlobalLibDir(util.get_project_dir(),
|
||||
join(util.get_home_dir(), "lib"),
|
||||
ctx.invoked_subcommand)
|
||||
if not storage_dirs:
|
||||
raise NotGlobalLibDir(
|
||||
get_project_dir(), get_project_global_lib_dir(), ctx.invoked_subcommand
|
||||
)
|
||||
|
||||
ctx.obj = LibraryManager(storage_dir)
|
||||
if "--json-output" not in ctx.args:
|
||||
click.echo("Library Storage: " + storage_dir)
|
||||
ctx.meta[CTX_META_PROJECT_ENVIRONMENTS_KEY] = options["environment"]
|
||||
ctx.meta[CTX_META_INPUT_DIRS_KEY] = storage_dirs
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY] = []
|
||||
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY] = {}
|
||||
for storage_dir in storage_dirs:
|
||||
if not is_platformio_project(storage_dir):
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
|
||||
continue
|
||||
with fs.cd(storage_dir):
|
||||
config = ProjectConfig.get_instance(
|
||||
os.path.join(storage_dir, "platformio.ini")
|
||||
)
|
||||
config.validate(options["environment"], silent=in_silence)
|
||||
libdeps_dir = config.get("platformio", "libdeps_dir")
|
||||
for env in config.envs():
|
||||
if options["environment"] and env not in options["environment"]:
|
||||
continue
|
||||
storage_dir = os.path.join(libdeps_dir, env)
|
||||
ctx.meta[CTX_META_STORAGE_DIRS_KEY].append(storage_dir)
|
||||
ctx.meta[CTX_META_STORAGE_LIBDEPS_KEY][storage_dir] = config.get(
|
||||
"env:" + env, "lib_deps", []
|
||||
)
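A short sketch of the per-environment storage resolution above; the directory and environment names are invented, and only the path arithmetic is shown:

```
import os

libdeps_dir = os.path.join(".pio", "libdeps")  # `libdeps_dir` project option
environments = ["uno", "nodemcu"]              # config.envs()
requested = ["uno"]                            # -e/--environment filter

storage_dirs = [
    os.path.join(libdeps_dir, env)
    for env in environments
    if not requested or env in requested
]
print(storage_dirs)  # e.g. ['.pio/libdeps/uno'] on POSIX
```
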
@cli.command("install", short_help="Install library")
|
||||
@click.argument("libraries", required=False, nargs=-1, metavar="[LIBRARY...]")
|
||||
# @click.option(
|
||||
# "--save",
|
||||
# is_flag=True,
|
||||
# help="Save installed libraries into the project's platformio.ini "
|
||||
# "library dependencies")
|
||||
@click.option(
|
||||
"-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
"--save/--no-save",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Save installed libraries into the `platformio.ini` dependency list"
|
||||
" (enabled by default)",
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option(
|
||||
"--interactive",
|
||||
is_flag=True,
|
||||
help="Allow to make a choice for all prompts")
|
||||
help="Deprecated! Please use a strict dependency specification (owner/libname)",
|
||||
)
|
||||
@click.option(
|
||||
"-f",
|
||||
"--force",
|
||||
is_flag=True,
|
||||
help="Reinstall/redownload library if exists")
|
||||
@click.pass_obj
|
||||
def lib_install(lm, libraries, silent, interactive, force):
|
||||
# @TODO: "save" option
|
||||
for library in libraries:
|
||||
lm.install(
|
||||
library, silent=silent, interactive=interactive, force=force)
|
||||
"-f", "--force", is_flag=True, help="Reinstall/redownload library if exists"
|
||||
)
|
||||
@click.pass_context
|
||||
def lib_install( # pylint: disable=too-many-arguments,too-many-positional-arguments,unused-argument
|
||||
ctx, libraries, save, silent, interactive, force
|
||||
):
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg install` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return invoke_command(
|
||||
ctx,
|
||||
package_install_cmd,
|
||||
libraries=libraries,
|
||||
no_save=not save,
|
||||
force=force,
|
||||
silent=silent,
|
||||
)
|
||||
|
||||
|
||||
@cli.command("uninstall", short_help="Uninstall libraries")
|
||||
@cli.command("uninstall", short_help="Remove libraries")
|
||||
@click.argument("libraries", nargs=-1, metavar="[LIBRARY...]")
|
||||
@click.pass_obj
|
||||
def lib_uninstall(lm, libraries):
|
||||
for library in libraries:
|
||||
lm.uninstall(library)
|
||||
@click.option(
|
||||
"--save/--no-save",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Remove libraries from the `platformio.ini` dependency list and save changes"
|
||||
" (enabled by default)",
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.pass_context
|
||||
def lib_uninstall(ctx, libraries, save, silent):
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg uninstall` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
invoke_command(
|
||||
ctx,
|
||||
package_uninstall_cmd,
|
||||
libraries=libraries,
|
||||
no_save=not save,
|
||||
silent=silent,
|
||||
)
|
||||
|
||||
|
||||
@cli.command("update", short_help="Update installed libraries")
|
||||
@@ -117,69 +202,98 @@ def lib_uninstall(lm, libraries):
|
||||
"-c",
|
||||
"--only-check",
|
||||
is_flag=True,
|
||||
help="Do not update, only check for new version")
|
||||
help="DEPRECATED. Please use `--dry-run` instead",
|
||||
)
|
||||
@click.option(
|
||||
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
|
||||
)
|
||||
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_obj
|
||||
def lib_update(lm, libraries, only_check, json_output):
|
||||
if not libraries:
|
||||
libraries = [manifest['__pkg_dir'] for manifest in lm.get_installed()]
|
||||
@click.pass_context
|
||||
def lib_update( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
ctx, libraries, only_check, dry_run, silent, json_output
|
||||
):
|
||||
only_check = dry_run or only_check
|
||||
if only_check and not json_output:
|
||||
raise exception.UserSideException(
|
||||
"This command is deprecated, please use `pio pkg outdated` instead"
|
||||
)
|
||||
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg update` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return invoke_command(
|
||||
ctx,
|
||||
package_update_cmd,
|
||||
libraries=libraries,
|
||||
silent=silent,
|
||||
)
|
||||
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
lib_deps = ctx.meta.get(CTX_META_STORAGE_LIBDEPS_KEY, {}).get(storage_dir, [])
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
lm.set_log_level(logging.WARN if silent else logging.DEBUG)
|
||||
_libraries = libraries or lib_deps or lm.get_installed()
|
||||
|
||||
if only_check and json_output:
|
||||
result = []
|
||||
for library in libraries:
|
||||
pkg_dir = library if isdir(library) else None
|
||||
requirements = None
|
||||
url = None
|
||||
if not pkg_dir:
|
||||
name, requirements, url = lm.parse_pkg_uri(library)
|
||||
pkg_dir = lm.get_package_dir(name, requirements, url)
|
||||
if not pkg_dir:
|
||||
for library in _libraries:
|
||||
spec = None
|
||||
pkg = None
|
||||
if isinstance(library, PackageItem):
|
||||
pkg = library
|
||||
else:
|
||||
spec = PackageSpec(library)
|
||||
pkg = lm.get_package(spec)
|
||||
if not pkg:
|
||||
continue
|
||||
latest = lm.outdated(pkg_dir, requirements)
|
||||
if not latest:
|
||||
outdated = lm.outdated(pkg, spec)
|
||||
if not outdated.is_outdated(allow_incompatible=True):
|
||||
continue
|
||||
manifest = lm.load_manifest(pkg_dir)
|
||||
manifest['versionLatest'] = latest
|
||||
manifest = lm.legacy_load_manifest(pkg)
|
||||
manifest["versionWanted"] = (
|
||||
str(outdated.wanted) if outdated.wanted else None
|
||||
)
|
||||
manifest["versionLatest"] = (
|
||||
str(outdated.latest) if outdated.latest else None
|
||||
)
|
||||
result.append(manifest)
|
||||
return click.echo(json.dumps(result))
|
||||
else:
|
||||
for library in libraries:
|
||||
lm.update(library, only_check=only_check)
|
||||
|
||||
return True
|
||||
json_result[storage_dir] = result
|
||||
|
||||
return click.echo(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def print_lib_item(item):
|
||||
click.secho(item['name'], fg="cyan")
|
||||
click.echo("=" * len(item['name']))
|
||||
if "id" in item:
|
||||
click.secho("#ID: %d" % item['id'], bold=True)
|
||||
if "description" in item or "url" in item:
|
||||
click.echo(item.get("description", item.get("url", "")))
|
||||
click.echo()
|
||||
@cli.command("list", short_help="List installed libraries")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_context
|
||||
def lib_list(ctx, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg list` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return invoke_command(ctx, package_list_cmd, only_libraries=True)
|
||||
|
||||
for key in ("version", "homepage", "license", "keywords"):
|
||||
if key not in item or not item[key]:
|
||||
continue
|
||||
if isinstance(item[key], list):
|
||||
click.echo("%s: %s" % (key.title(), ", ".join(item[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.title(), item[key]))
|
||||
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in item:
|
||||
continue
|
||||
click.echo("Compatible %s: %s" % (key, ", ".join(
|
||||
[i['title'] if isinstance(i, dict) else i for i in item[key]])))
|
||||
|
||||
if "authors" in item or "authornames" in item:
|
||||
click.echo("Authors: %s" % ", ".join(
|
||||
item.get("authornames",
|
||||
[a.get("name", "") for a in item.get("authors", [])])))
|
||||
|
||||
if "__src_url" in item:
|
||||
click.secho("Source: %s" % item['__src_url'])
|
||||
click.echo()
|
||||
storage_dirs = ctx.meta[CTX_META_STORAGE_DIRS_KEY]
|
||||
json_result = {}
|
||||
for storage_dir in storage_dirs:
|
||||
lm = LibraryPackageManager(storage_dir)
|
||||
json_result[storage_dir] = lm.legacy_get_installed()
|
||||
return click.echo(
|
||||
json.dumps(
|
||||
json_result[storage_dirs[0]] if len(storage_dirs) == 1 else json_result
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for a library")
|
||||
@@ -187,6 +301,7 @@ def print_lib_item(item):
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.option("--page", type=click.INT, default=1)
|
||||
@click.option("--id", multiple=True)
|
||||
@click.option("-o", "--owner", multiple=True)
|
||||
@click.option("-n", "--name", multiple=True)
|
||||
@click.option("-a", "--author", multiple=True)
|
||||
@click.option("-k", "--keyword", multiple=True)
|
||||
@@ -196,8 +311,12 @@ def print_lib_item(item):
|
||||
@click.option(
|
||||
"--noninteractive",
|
||||
is_flag=True,
|
||||
help="Do not prompt, automatically paginate with delay")
|
||||
def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
help="Do not prompt, automatically paginate with delay",
|
||||
)
|
||||
@click.pass_context
|
||||
def lib_search( # pylint: disable=unused-argument
|
||||
ctx, query, json_output, page, noninteractive, **filters
|
||||
):
|
||||
if not query:
|
||||
query = []
|
||||
if not isinstance(query, list):
|
||||
@@ -207,95 +326,41 @@ def lib_search(query, json_output, page, noninteractive, **filters):
|
||||
for value in values:
|
||||
query.append('%s:"%s"' % (key, value))
|
||||
|
||||
result = get_api_result(
|
||||
"/v2/lib/search",
|
||||
dict(query=" ".join(query), page=page),
|
||||
cache_valid="1d")
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(result))
|
||||
return
|
||||
|
||||
if result['total'] == 0:
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"Nothing has been found by your request\n"
|
||||
"Try a less-specific search or use truncation (or wildcard) "
|
||||
"operator",
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg search` instead.\n",
|
||||
fg="yellow",
|
||||
nl=False)
|
||||
click.secho(" *", fg="green")
|
||||
click.secho("For example: DS*, PCA*, DHT* and etc.\n", fg="yellow")
|
||||
click.echo("For more examples and advanced search syntax, "
|
||||
"please use documentation:")
|
||||
click.secho(
|
||||
"https://docs.platformio.org/page/userguide/lib/cmd_search.html\n",
|
||||
fg="cyan")
|
||||
return
|
||||
)
|
||||
query.append("type:library")
|
||||
return ctx.invoke(package_search_cmd, query=" ".join(query), page=page)
|
||||
|
||||
click.secho(
|
||||
"Found %d libraries:\n" % result['total'],
|
||||
fg="green" if result['total'] else "yellow")
|
||||
|
||||
while True:
|
||||
for item in result['items']:
|
||||
print_lib_item(item)
|
||||
|
||||
if (int(result['page']) * int(result['perpage']) >= int(
|
||||
result['total'])):
|
||||
break
|
||||
|
||||
if noninteractive:
|
||||
click.echo()
|
||||
click.secho(
|
||||
"Loading next %d libraries... Press Ctrl+C to stop!" %
|
||||
result['perpage'],
|
||||
fg="yellow")
|
||||
click.echo()
|
||||
time.sleep(5)
|
||||
elif not click.confirm("Show next libraries?"):
|
||||
break
|
||||
result = get_api_result(
|
||||
"/v2/lib/search", {
|
||||
"query": " ".join(query),
|
||||
"page": int(result['page']) + 1
|
||||
},
|
||||
cache_valid="1d")
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed libraries")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
@click.pass_obj
|
||||
def lib_list(lm, json_output):
|
||||
items = lm.get_installed()
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
if not items:
|
||||
return None
|
||||
|
||||
for item in sorted(items, key=lambda i: i['name']):
|
||||
print_lib_item(item)
|
||||
|
||||
return True
|
||||
regclient = LibraryPackageManager().get_registry_client_instance()
|
||||
result = regclient.fetch_json_data(
|
||||
"get",
|
||||
"/v2/lib/search",
|
||||
params=dict(query=" ".join(query), page=page),
|
||||
x_cache_valid="1d",
|
||||
)
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
|
||||
@cli.command("builtin", short_help="List built-in libraries")
|
||||
@click.option("--storage", multiple=True)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_builtin(storage, json_output):
|
||||
items = get_builtin_libs(storage)
|
||||
items = LibraryPackageManager.get_builtin_libs(storage)
|
||||
if json_output:
|
||||
return click.echo(json.dumps(items))
|
||||
|
||||
for storage_ in items:
|
||||
if not storage_['items']:
|
||||
if not storage_["items"]:
|
||||
continue
|
||||
click.secho(storage_['name'], fg="green")
|
||||
click.echo("*" * len(storage_['name']))
|
||||
click.secho(storage_["name"], fg="green")
|
||||
click.echo("*" * len(storage_["name"]))
|
||||
click.echo()
|
||||
|
||||
for item in sorted(storage_['items'], key=lambda i: i['name']):
|
||||
for item in sorted(storage_["items"], key=lambda i: i["name"]):
|
||||
print_lib_item(item)
|
||||
|
||||
return True
|
||||
@@ -304,169 +369,91 @@ def lib_builtin(storage, json_output):
|
||||
@cli.command("show", short_help="Show detailed info about a library")
|
||||
@click.argument("library", metavar="[LIBRARY]")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_show(library, json_output):
|
||||
lm = LibraryManager()
|
||||
name, requirements, _ = lm.parse_pkg_uri(library)
|
||||
lib_id = lm.search_lib_id({
|
||||
"name": name,
|
||||
"requirements": requirements
|
||||
},
|
||||
silent=json_output,
|
||||
interactive=not json_output)
|
||||
lib = get_api_result("/lib/info/%d" % lib_id, cache_valid="1d")
|
||||
if json_output:
|
||||
return click.echo(json.dumps(lib))
|
||||
|
||||
click.secho(lib['name'], fg="cyan")
|
||||
click.echo("=" * len(lib['name']))
|
||||
click.secho("#ID: %d" % lib['id'], bold=True)
|
||||
click.echo(lib['description'])
|
||||
click.echo()
|
||||
|
||||
click.echo(
|
||||
"Version: %s, released %s" %
|
||||
(lib['version']['name'],
|
||||
time.strftime("%c", util.parse_date(lib['version']['released']))))
|
||||
click.echo("Manifest: %s" % lib['confurl'])
|
||||
for key in ("homepage", "repository", "license"):
|
||||
if key not in lib or not lib[key]:
|
||||
continue
|
||||
if isinstance(lib[key], list):
|
||||
click.echo("%s: %s" % (key.title(), ", ".join(lib[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.title(), lib[key]))
|
||||
|
||||
blocks = []
|
||||
|
||||
_authors = []
|
||||
for author in lib.get("authors", []):
|
||||
_data = []
|
||||
for key in ("name", "email", "url", "maintainer"):
|
||||
if not author[key]:
|
||||
continue
|
||||
if key == "email":
|
||||
_data.append("<%s>" % author[key])
|
||||
elif key == "maintainer":
|
||||
_data.append("(maintainer)")
|
||||
else:
|
||||
_data.append(author[key])
|
||||
_authors.append(" ".join(_data))
|
||||
if _authors:
|
||||
blocks.append(("Authors", _authors))
|
||||
|
||||
blocks.append(("Keywords", lib['keywords']))
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in lib or not lib[key]:
|
||||
continue
|
||||
blocks.append(("Compatible %s" % key, [i['title'] for i in lib[key]]))
|
||||
blocks.append(("Headers", lib['headers']))
|
||||
blocks.append(("Examples", lib['examples']))
|
||||
blocks.append(("Versions", [
|
||||
"%s, released %s" %
|
||||
(v['name'], time.strftime("%c", util.parse_date(v['released'])))
|
||||
for v in lib['versions']
|
||||
]))
|
||||
blocks.append(("Unique Downloads", [
|
||||
"Today: %s" % lib['dlstats']['day'],
|
||||
"Week: %s" % lib['dlstats']['week'],
|
||||
"Month: %s" % lib['dlstats']['month']
|
||||
]))
|
||||
|
||||
for (title, rows) in blocks:
|
||||
click.echo()
|
||||
click.secho(title, bold=True)
|
||||
click.echo("-" * len(title))
|
||||
for row in rows:
|
||||
click.echo(row)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@cli.command("register", short_help="Register a new library")
|
||||
@click.argument("config_url")
|
||||
def lib_register(config_url):
|
||||
if (not config_url.startswith("http://")
|
||||
and not config_url.startswith("https://")):
|
||||
raise exception.InvalidLibConfURL(config_url)
|
||||
|
||||
result = get_api_result("/lib/register", data=dict(config_url=config_url))
|
||||
if "message" in result and result['message']:
|
||||
@click.pass_context
|
||||
def lib_show(ctx, library, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
result['message'],
|
||||
fg="green"
|
||||
if "successed" in result and result['successed'] else "red")
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg show` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return ctx.invoke(package_show_cmd, pkg_type="library", spec=library)
|
||||
|
||||
lm = LibraryPackageManager()
|
||||
lm.set_log_level(logging.ERROR if json_output else logging.DEBUG)
|
||||
lib_id = lm.reveal_registry_package_id(library)
|
||||
regclient = lm.get_registry_client_instance()
|
||||
lib = regclient.fetch_json_data(
|
||||
"get", "/v2/lib/info/%d" % lib_id, x_cache_valid="1h"
|
||||
)
|
||||
return click.echo(json.dumps(lib))
|
||||
|
||||
|
||||
@cli.command("register", short_help="Deprecated")
|
||||
@click.argument("config_url")
|
||||
def lib_register(config_url): # pylint: disable=unused-argument
|
||||
raise exception.UserSideException(
|
||||
"This command is deprecated. Please use `pio pkg publish` command."
|
||||
)
|
||||
|
||||
|
||||
@cli.command("stats", short_help="Library Registry Statistics")
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def lib_stats(json_output):
|
||||
result = get_api_result("/lib/stats", cache_valid="1h")
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease visit "
|
||||
"https://registry.platformio.org\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return None
|
||||
|
||||
if json_output:
|
||||
return click.echo(json.dumps(result))
|
||||
regclient = LibraryPackageManager().get_registry_client_instance()
|
||||
result = regclient.fetch_json_data("get", "/v2/lib/stats", x_cache_valid="1h")
|
||||
return click.echo(json.dumps(result))
|
||||
|
||||
printitem_tpl = "{name:<33} {url}"
|
||||
printitemdate_tpl = "{name:<33} {date:23} {url}"
|
||||
|
||||
def _print_title(title):
|
||||
click.secho(title.upper(), bold=True)
|
||||
click.echo("*" * len(title))
|
||||
|
||||
def _print_header(with_date=False):
|
||||
click.echo((printitemdate_tpl if with_date else printitem_tpl).format(
|
||||
name=click.style("Name", fg="cyan"),
|
||||
date="Date",
|
||||
url=click.style("Url", fg="blue")))
|
||||
|
||||
terminal_width, _ = click.get_terminal_size()
|
||||
click.echo("-" * terminal_width)
|
||||
|
||||
def _print_lib_item(item):
|
||||
date = str(
|
||||
time.strftime("%c", util.parse_date(item['date'])) if "date" in
|
||||
item else "")
|
||||
url = click.style(
|
||||
"https://platformio.org/lib/show/%s/%s" % (item['id'],
|
||||
quote(item['name'])),
|
||||
fg="blue")
|
||||
click.echo(
|
||||
(printitemdate_tpl if "date" in item else printitem_tpl).format(
|
||||
name=click.style(item['name'], fg="cyan"), date=date, url=url))
|
||||
|
||||
def _print_tag_item(name):
|
||||
click.echo(
|
||||
printitem_tpl.format(
|
||||
name=click.style(name, fg="cyan"),
|
||||
url=click.style(
|
||||
"https://platformio.org/lib/search?query=" + quote(
|
||||
"keyword:%s" % name),
|
||||
fg="blue")))
|
||||
|
||||
for key in ("updated", "added"):
|
||||
_print_title("Recently " + key)
|
||||
_print_header(with_date=True)
|
||||
for item in result.get(key, []):
|
||||
_print_lib_item(item)
|
||||
click.echo()
|
||||
|
||||
_print_title("Recent keywords")
|
||||
_print_header(with_date=False)
|
||||
for item in result.get("lastkeywords"):
|
||||
_print_tag_item(item)
|
||||
def print_lib_item(item):
|
||||
click.secho(item["name"], fg="cyan")
|
||||
click.echo("=" * len(item["name"]))
|
||||
if "id" in item:
|
||||
click.secho("#ID: %d" % item["id"], bold=True)
|
||||
if "description" in item or "url" in item:
|
||||
click.echo(item.get("description", item.get("url", "")))
|
||||
click.echo()
|
||||
|
||||
_print_title("Popular keywords")
|
||||
_print_header(with_date=False)
|
||||
for item in result.get("topkeywords"):
|
||||
_print_tag_item(item)
|
||||
for key in ("version", "homepage", "license", "keywords"):
|
||||
if key not in item or not item[key]:
|
||||
continue
|
||||
if isinstance(item[key], list):
|
||||
click.echo("%s: %s" % (key.capitalize(), ", ".join(item[key])))
|
||||
else:
|
||||
click.echo("%s: %s" % (key.capitalize(), item[key]))
|
||||
|
||||
for key in ("frameworks", "platforms"):
|
||||
if key not in item:
|
||||
continue
|
||||
click.echo(
|
||||
"Compatible %s: %s"
|
||||
% (
|
||||
key,
|
||||
", ".join(
|
||||
[i["title"] if isinstance(i, dict) else i for i in item[key]]
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
if "authors" in item or "authornames" in item:
|
||||
click.echo(
|
||||
"Authors: %s"
|
||||
% ", ".join(
|
||||
item.get(
|
||||
"authornames", [a.get("name", "") for a in item.get("authors", [])]
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
if "__src_url" in item:
|
||||
click.secho("Source: %s" % item["__src_url"])
|
||||
click.echo()
|
||||
|
||||
for key, title in (("dlday", "Today"), ("dlweek", "Week"), ("dlmonth",
|
||||
"Month")):
|
||||
_print_title("Featured: " + title)
|
||||
_print_header(with_date=False)
|
||||
for item in result.get(key, []):
|
||||
_print_lib_item(item)
|
||||
click.echo()
|
||||
|
||||
return True
|
||||
|
@@ -13,160 +13,44 @@
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
from os.path import dirname, isdir
|
||||
import logging
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from platformio import app, exception, util
|
||||
from platformio.commands.boards import print_boards
|
||||
from platformio.managers.platform import PlatformFactory, PlatformManager
|
||||
from platformio.exception import UserSideException
|
||||
from platformio.package.commands.install import package_install_cmd
|
||||
from platformio.package.commands.list import package_list_cmd
|
||||
from platformio.package.commands.search import package_search_cmd
|
||||
from platformio.package.commands.show import package_show_cmd
|
||||
from platformio.package.commands.uninstall import package_uninstall_cmd
|
||||
from platformio.package.commands.update import package_update_cmd
|
||||
from platformio.package.manager.platform import PlatformPackageManager
|
||||
from platformio.package.meta import PackageItem, PackageSpec
|
||||
from platformio.package.version import get_original_version
|
||||
from platformio.platform.exception import UnknownPlatform
|
||||
from platformio.platform.factory import PlatformFactory
|
||||
|
||||
|
||||
@click.group(short_help="Platform Manager")
|
||||
@click.group(short_help="Platform manager", hidden=True)
|
||||
def cli():
|
||||
pass
|
||||
|
||||
|
||||
def _print_platforms(platforms):
|
||||
for platform in platforms:
|
||||
click.echo("{name} ~ {title}".format(
|
||||
name=click.style(platform['name'], fg="cyan"),
|
||||
title=platform['title']))
|
||||
click.echo("=" * (3 + len(platform['name'] + platform['title'])))
|
||||
click.echo(platform['description'])
|
||||
click.echo()
|
||||
if "homepage" in platform:
|
||||
click.echo("Home: %s" % platform['homepage'])
|
||||
if "frameworks" in platform and platform['frameworks']:
|
||||
click.echo("Frameworks: %s" % ", ".join(platform['frameworks']))
|
||||
if "packages" in platform:
|
||||
click.echo("Packages: %s" % ", ".join(platform['packages']))
|
||||
if "version" in platform:
|
||||
click.echo("Version: " + platform['version'])
|
||||
click.echo()
|
||||
|
||||
|
||||
def _get_registry_platforms():
|
||||
platforms = util.get_api_result("/platforms", cache_valid="7d")
|
||||
pm = PlatformManager()
|
||||
for platform in platforms or []:
|
||||
platform['versions'] = pm.get_all_repo_versions(platform['name'])
|
||||
return platforms


def _original_version(version):
    if version.count(".") != 2:
        return None
    _, y = version.split(".")[:2]
    if int(y) < 100:
        return None
    if len(y) % 2 != 0:
        y = "0" + y
    parts = [str(int(y[i * 2:i * 2 + 2])) for i in range(len(y) / 2)]
    return ".".join(parts)
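A hypothetical walk-through of `_original_version()` above: PlatformIO encodes an upstream package version such as `1.6.7` into the second component of its own semver (`1.10607.0`), and this helper reverses that encoding. The standalone copy below uses integer division so it also runs on Python 3 (the original targets Python 2):

```
def original_version(version):
    if version.count(".") != 2:
        return None
    _, y = version.split(".")[:2]
    if int(y) < 100:
        return None
    if len(y) % 2 != 0:
        y = "0" + y
    # split the encoded middle component into two-digit chunks: "010607" -> 1.6.7
    parts = [str(int(y[i * 2:i * 2 + 2])) for i in range(len(y) // 2)]
    return ".".join(parts)

print(original_version("1.10607.0"))  # -> 1.6.7
print(original_version("1.3.0"))      # -> None (not an encoded version)
```
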
def _get_platform_data(*args, **kwargs):
|
||||
try:
|
||||
return _get_installed_platform_data(*args, **kwargs)
|
||||
except exception.UnknownPlatform:
|
||||
return _get_registry_platform_data(*args, **kwargs)
|
||||
|
||||
|
||||
def _get_installed_platform_data(platform,
|
||||
with_boards=True,
|
||||
expose_packages=True):
|
||||
p = PlatformFactory.newPlatform(platform)
|
||||
data = dict(
|
||||
name=p.name,
|
||||
title=p.title,
|
||||
description=p.description,
|
||||
version=p.version,
|
||||
homepage=p.homepage,
|
||||
repository=p.repository_url,
|
||||
url=p.vendor_url,
|
||||
docs=p.docs_url,
|
||||
license=p.license,
|
||||
forDesktop=not p.is_embedded(),
|
||||
frameworks=sorted(p.frameworks.keys() if p.frameworks else []),
|
||||
packages=p.packages.keys() if p.packages else [])
|
||||
|
||||
# if dump to API
|
||||
# del data['version']
|
||||
# return data
|
||||
|
||||
# overwrite VCS version and add extra fields
|
||||
manifest = PlatformManager().load_manifest(dirname(p.manifest_path))
|
||||
assert manifest
|
||||
for key in manifest:
|
||||
if key == "version" or key.startswith("__"):
|
||||
data[key] = manifest[key]
|
||||
|
||||
if with_boards:
|
||||
data['boards'] = [c.get_brief_data() for c in p.get_boards().values()]
|
||||
|
||||
if not data['packages'] or not expose_packages:
|
||||
return data
|
||||
|
||||
data['packages'] = []
|
||||
installed_pkgs = p.get_installed_packages()
|
||||
for name, opts in p.packages.items():
|
||||
item = dict(
|
||||
name=name,
|
||||
type=p.get_package_type(name),
|
||||
requirements=opts.get("version"),
|
||||
optional=opts.get("optional") is True)
|
||||
if name in installed_pkgs:
|
||||
for key, value in installed_pkgs[name].items():
|
||||
if key not in ("url", "version", "description"):
|
||||
continue
|
||||
item[key] = value
|
||||
if key == "version":
|
||||
item["originalVersion"] = _original_version(value)
|
||||
data['packages'].append(item)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def _get_registry_platform_data( # pylint: disable=unused-argument
|
||||
platform,
|
||||
with_boards=True,
|
||||
expose_packages=True):
|
||||
_data = None
|
||||
for p in _get_registry_platforms():
|
||||
if p['name'] == platform:
|
||||
_data = p
|
||||
break
|
||||
|
||||
if not _data:
|
||||
return None
|
||||
|
||||
data = dict(
|
||||
name=_data['name'],
|
||||
title=_data['title'],
|
||||
description=_data['description'],
|
||||
homepage=_data['homepage'],
|
||||
repository=_data['repository'],
|
||||
url=_data['url'],
|
||||
license=_data['license'],
|
||||
forDesktop=_data['forDesktop'],
|
||||
frameworks=_data['frameworks'],
|
||||
packages=_data['packages'],
|
||||
versions=_data['versions'])
|
||||
|
||||
if with_boards:
|
||||
data['boards'] = [
|
||||
board for board in PlatformManager().get_registered_boards()
|
||||
if board['platform'] == _data['name']
|
||||
]
|
||||
|
||||
return data
|
||||
|
||||
|
||||
@cli.command("search", short_help="Search for development platform")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_search(query, json_output):
|
||||
@click.pass_context
|
||||
def platform_search(ctx, query, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease use `pio pkg search` instead.\n",
|
||||
fg="yellow",
|
||||
)
|
||||
query = query or ""
|
||||
return ctx.invoke(package_search_cmd, query=f"type:platform {query}".strip())
|
||||
|
||||
platforms = []
|
||||
for platform in _get_registry_platforms():
|
||||
if query == "all":
|
||||
@@ -176,119 +60,89 @@ def platform_search(query, json_output):
|
||||
continue
|
||||
platforms.append(
|
||||
_get_registry_platform_data(
|
||||
platform['name'], with_boards=False, expose_packages=False))
|
||||
|
||||
if json_output:
|
||||
click.echo(json.dumps(platforms))
|
||||
else:
|
||||
_print_platforms(platforms)
|
||||
platform["name"], with_boards=False, expose_packages=False
|
||||
)
|
||||
)
|
||||
click.echo(json.dumps(platforms))
|
||||
return None
|
||||
|
||||
|
||||
@cli.command("frameworks", short_help="List supported frameworks, SDKs")
|
||||
@click.argument("query", required=False)
|
||||
@click.option("--json-output", is_flag=True)
|
||||
def platform_frameworks(query, json_output):
|
||||
if not json_output:
|
||||
click.secho(
|
||||
"\nWARNING: This command is deprecated and will be removed in "
|
||||
"the next releases. \nPlease visit https://docs.platformio.org"
|
||||
"/en/latest/frameworks/index.html\n",
|
||||
fg="yellow",
|
||||
)
|
||||
return
|
||||
|
||||
regclient = PlatformPackageManager().get_registry_client_instance()
|
||||
frameworks = []
|
||||
for framework in util.get_api_result("/frameworks", cache_valid="7d"):
|
||||
for framework in regclient.fetch_json_data(
|
||||
"get", "/v2/frameworks", x_cache_valid="1d"
|
||||
):
|
||||
if query == "all":
|
||||
query = ""
|
||||
search_data = json.dumps(framework)
|
||||
if query and query.lower() not in search_data.lower():
|
||||
continue
|
||||
framework['homepage'] = (
|
||||
"https://platformio.org/frameworks/" + framework['name'])
|
||||
framework['platforms'] = [
|
||||
platform['name'] for platform in _get_registry_platforms()
|
||||
if framework['name'] in platform['frameworks']
|
||||
framework["homepage"] = "https://platformio.org/frameworks/" + framework["name"]
|
||||
framework["platforms"] = [
|
||||
platform["name"]
|
||||
for platform in _get_registry_platforms()
|
||||
if framework["name"] in platform["frameworks"]
|
||||
]
|
||||
frameworks.append(framework)
|
||||
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest['name'])
|
||||
if json_output:
|
||||
click.echo(json.dumps(frameworks))
|
||||
else:
|
||||
_print_platforms(frameworks)
|
||||
frameworks = sorted(frameworks, key=lambda manifest: manifest["name"])
|
||||
click.echo(json.dumps(frameworks))
|
||||
|
||||
|
||||
@cli.command("list", short_help="List installed development platforms")
@click.option("--json-output", is_flag=True)
def platform_list(json_output):
platforms = []
pm = PlatformManager()
for manifest in pm.get_installed():
platforms.append(
_get_installed_platform_data(
manifest['__pkg_dir'],
with_boards=False,
expose_packages=False))
@click.pass_context
def platform_list(ctx, json_output):
if not json_output:
click.secho(
"\nWARNING: This command is deprecated and will be removed in "
"the next releases. \nPlease use `pio pkg list` instead.\n",
fg="yellow",
)
return ctx.invoke(package_list_cmd, **{"global": True, "only_platforms": True})

platforms = sorted(platforms, key=lambda manifest: manifest['name'])
if json_output:
click.echo(json.dumps(platforms))
else:
_print_platforms(platforms)
platforms = []
pm = PlatformPackageManager()
for pkg in pm.get_installed():
platforms.append(
_get_installed_platform_data(pkg, with_boards=False, expose_packages=False)
)

platforms = sorted(platforms, key=lambda manifest: manifest["name"])
click.echo(json.dumps(platforms))
return None


@cli.command("show", short_help="Show details about development platform")
@click.argument("platform")
@click.option("--json-output", is_flag=True)
def platform_show(platform, json_output): # pylint: disable=too-many-branches
@click.pass_context
def platform_show(ctx, platform, json_output): # pylint: disable=too-many-branches
if not json_output:
click.secho(
"\nWARNING: This command is deprecated and will be removed in "
"the next releases. \nPlease use `pio pkg show` instead.\n",
fg="yellow",
)
return ctx.invoke(package_show_cmd, pkg_type="platform", spec=platform)

data = _get_platform_data(platform)
if not data:
raise exception.UnknownPlatform(platform)
if json_output:
return click.echo(json.dumps(data))

click.echo("{name} ~ {title}".format(
name=click.style(data['name'], fg="cyan"), title=data['title']))
click.echo("=" * (3 + len(data['name'] + data['title'])))
click.echo(data['description'])
click.echo()
if "version" in data:
click.echo("Version: %s" % data['version'])
if data['homepage']:
click.echo("Home: %s" % data['homepage'])
if data['repository']:
click.echo("Repository: %s" % data['repository'])
if data['url']:
click.echo("Vendor: %s" % data['url'])
if data['license']:
click.echo("License: %s" % data['license'])
if data['frameworks']:
click.echo("Frameworks: %s" % ", ".join(data['frameworks']))

if not data['packages']:
return None

if not isinstance(data['packages'][0], dict):
click.echo("Packages: %s" % ", ".join(data['packages']))
else:
click.echo()
click.secho("Packages", bold=True)
click.echo("--------")
for item in data['packages']:
click.echo()
click.echo("Package %s" % click.style(item['name'], fg="yellow"))
click.echo("-" * (8 + len(item['name'])))
if item['type']:
click.echo("Type: %s" % item['type'])
click.echo("Requirements: %s" % item['requirements'])
click.echo("Installed: %s" %
("Yes" if item.get("version") else "No (optional)"))
if "version" in item:
click.echo("Version: %s" % item['version'])
if "originalVersion" in item:
click.echo("Original version: %s" % item['originalVersion'])
if "description" in item:
click.echo("Description: %s" % item['description'])

if data['boards']:
click.echo()
click.secho("Boards", bold=True)
click.echo("------")
print_boards(data['boards'])

return True
raise UnknownPlatform(platform)
return click.echo(json.dumps(data))


@cli.command("install", short_help="Install new development platform")
@@ -296,93 +150,242 @@ def platform_show(platform, json_output): # pylint: disable=too-many-branches
@click.option("--with-package", multiple=True)
@click.option("--without-package", multiple=True)
@click.option("--skip-default-package", is_flag=True)
@click.option("--with-all-packages", is_flag=True)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option(
"-f",
"--force",
is_flag=True,
help="Reinstall/redownload dev/platform and its packages if exist")
def platform_install(platforms, with_package, without_package,
skip_default_package, force):
pm = PlatformManager()
for platform in platforms:
if pm.install(
name=platform,
with_packages=with_package,
without_packages=without_package,
skip_default_package=skip_default_package,
force=force):
click.secho(
"The platform '%s' has been successfully installed!\n"
"The rest of packages will be installed automatically "
"depending on your build environment." % platform,
fg="green")
help="Reinstall/redownload dev/platform and its packages if exist",
)
@click.pass_context
def platform_install( # pylint: disable=too-many-arguments,too-many-positional-arguments
ctx,
platforms,
with_package,
without_package,
skip_default_package,
with_all_packages,
silent,
force,
):
click.secho(
"\nWARNING: This command is deprecated and will be removed in "
"the next releases. \nPlease use `pio pkg install` instead.\n",
fg="yellow",
)
ctx.invoke(
package_install_cmd,
**{
"global": True,
"platforms": platforms,
"skip_dependencies": (
not with_all_packages
and (with_package or without_package or skip_default_package)
),
"silent": silent,
"force": force,
},
)


@cli.command("uninstall", short_help="Uninstall development platform")
@click.argument("platforms", nargs=-1, required=True, metavar="[PLATFORM...]")
def platform_uninstall(platforms):
pm = PlatformManager()
for platform in platforms:
if pm.uninstall(platform):
click.secho(
"The platform '%s' has been successfully "
"uninstalled!" % platform,
fg="green")
@click.pass_context
def platform_uninstall(ctx, platforms):
click.secho(
"\nWARNING: This command is deprecated and will be removed in "
"the next releases. \nPlease use `pio pkg uninstall` instead.\n",
fg="yellow",
)
ctx.invoke(
package_uninstall_cmd,
**{
"global": True,
"platforms": platforms,
},
)


@cli.command("update", short_help="Update installed development platforms")
@click.argument("platforms", nargs=-1, required=False, metavar="[PLATFORM...]")
@click.option(
"-p",
"--only-packages",
is_flag=True,
help="Update only the platform packages")
"-p", "--only-packages", is_flag=True, help="Update only the platform packages"
)
@click.option(
"-c",
"--only-check",
is_flag=True,
help="Do not update, only check for a new version")
help="DEPRECATED. Please use `--dry-run` instead",
)
@click.option(
"--dry-run", is_flag=True, help="Do not update, only check for the new versions"
)
@click.option("-s", "--silent", is_flag=True, help="Suppress progress reporting")
@click.option("--json-output", is_flag=True)
def platform_update(platforms, only_packages, only_check, json_output):
pm = PlatformManager()
pkg_dir_to_name = {}
if not platforms:
platforms = []
for manifest in pm.get_installed():
platforms.append(manifest['__pkg_dir'])
pkg_dir_to_name[manifest['__pkg_dir']] = manifest.get(
"title", manifest['name'])
@click.pass_context
def platform_update( # pylint: disable=too-many-locals,too-many-arguments,too-many-positional-arguments
ctx, platforms, only_check, dry_run, silent, json_output, **_
):
only_check = dry_run or only_check

if only_check and json_output:
result = []
for platform in platforms:
pkg_dir = platform if isdir(platform) else None
requirements = None
url = None
if not pkg_dir:
name, requirements, url = pm.parse_pkg_uri(platform)
pkg_dir = pm.get_package_dir(name, requirements, url)
if not pkg_dir:
continue
latest = pm.outdated(pkg_dir, requirements)
if (not latest and not PlatformFactory.newPlatform(
pkg_dir).are_outdated_packages()):
continue
data = _get_installed_platform_data(
pkg_dir, with_boards=False, expose_packages=False)
if latest:
data['versionLatest'] = latest
result.append(data)
return click.echo(json.dumps(result))
else:
# cleanup cached board and platform lists
app.clean_cache()
for platform in platforms:
click.echo("Platform %s" % click.style(
pkg_dir_to_name.get(platform, platform), fg="cyan"))
click.echo("--------")
pm.update(
platform, only_packages=only_packages, only_check=only_check)
click.echo()
if only_check and not json_output:
raise UserSideException(
"This command is deprecated, please use `pio pkg outdated` instead"
)

if not json_output:
click.secho(
"\nWARNING: This command is deprecated and will be removed in "
"the next releases. \nPlease use `pio pkg update` instead.\n",
fg="yellow",
)
return ctx.invoke(
package_update_cmd,
**{
"global": True,
"platforms": platforms,
"silent": silent,
},
)

pm = PlatformPackageManager()
pm.set_log_level(logging.WARN if silent else logging.DEBUG)
platforms = platforms or pm.get_installed()
result = []
for platform in platforms:
spec = None
pkg = None
if isinstance(platform, PackageItem):
pkg = platform
else:
spec = PackageSpec(platform)
pkg = pm.get_package(spec)
if not pkg:
continue
outdated = pm.outdated(pkg, spec)
if (
not outdated.is_outdated(allow_incompatible=True)
and not PlatformFactory.new(pkg).are_outdated_packages()
):
continue
data = _get_installed_platform_data(
pkg, with_boards=False, expose_packages=False
)
if outdated.is_outdated(allow_incompatible=True):
data["versionLatest"] = str(outdated.latest) if outdated.latest else None
result.append(data)
click.echo(json.dumps(result))
return True


#
# Helpers
#


def _get_registry_platforms():
regclient = PlatformPackageManager().get_registry_client_instance()
return regclient.fetch_json_data("get", "/v2/platforms", x_cache_valid="1d")


def _get_platform_data(*args, **kwargs):
try:
return _get_installed_platform_data(*args, **kwargs)
except UnknownPlatform:
return _get_registry_platform_data(*args, **kwargs)


def _get_installed_platform_data(platform, with_boards=True, expose_packages=True):
p = PlatformFactory.new(platform)
data = dict(
name=p.name,
title=p.title,
description=p.description,
version=p.version,
homepage=p.homepage,
url=p.homepage,
repository=p.repository_url,
license=p.license,
forDesktop=not p.is_embedded(),
frameworks=sorted(list(p.frameworks) if p.frameworks else []),
packages=list(p.packages) if p.packages else [],
)

# if dump to API
# del data['version']
# return data

# overwrite VCS version and add extra fields
manifest = PlatformPackageManager().legacy_load_manifest(
os.path.dirname(p.manifest_path)
)
assert manifest
for key in manifest:
if key == "version" or key.startswith("__"):
data[key] = manifest[key]

if with_boards:
data["boards"] = [c.get_brief_data() for c in p.get_boards().values()]

if not data["packages"] or not expose_packages:
return data

data["packages"] = []
installed_pkgs = {
pkg.metadata.name: p.pm.load_manifest(pkg) for pkg in p.get_installed_packages()
}
for name, options in p.packages.items():
item = dict(
name=name,
type=p.get_package_type(name),
requirements=options.get("version"),
optional=options.get("optional") is True,
)
if name in installed_pkgs:
for key, value in installed_pkgs[name].items():
if key not in ("url", "version", "description"):
continue
item[key] = value
if key == "version":
item["originalVersion"] = get_original_version(value)
data["packages"].append(item)

return data


def _get_registry_platform_data( # pylint: disable=unused-argument
platform, with_boards=True, expose_packages=True
):
_data = None
for p in _get_registry_platforms():
if p["name"] == platform:
_data = p
break

if not _data:
return None

data = dict(
ownername=_data.get("ownername"),
name=_data["name"],
title=_data["title"],
description=_data["description"],
homepage=_data["homepage"],
repository=_data["repository"],
url=_data["url"],
license=_data["license"],
forDesktop=_data["forDesktop"],
frameworks=_data["frameworks"],
packages=_data["packages"],
versions=_data.get("versions"),
)

if with_boards:
data["boards"] = [
board
for board in PlatformPackageManager().get_registered_boards()
if board["platform"] == _data["name"]
]

return data
@@ -1,209 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
import threading
from os import getcwd
from os.path import isfile, join
from tempfile import mkdtemp
from time import sleep

import click

from platformio import exception, util
from platformio.commands.device import device_monitor as cmd_device_monitor
from platformio.managers.core import pioplus_call

# pylint: disable=unused-argument


@click.group("remote", short_help="PIO Remote")
@click.option("-a", "--agent", multiple=True)
def cli(**kwargs):
pass


@cli.group("agent", short_help="Start new agent or list active")
def remote_agent():
pass


@remote_agent.command("start", short_help="Start agent")
@click.option("-n", "--name")
@click.option("-s", "--share", multiple=True, metavar="E-MAIL")
@click.option(
"-d",
"--working-dir",
envvar="PLATFORMIO_REMOTE_AGENT_DIR",
type=click.Path(
file_okay=False, dir_okay=True, writable=True, resolve_path=True))
def remote_agent_start(**kwargs):
pioplus_call(sys.argv[1:])


@remote_agent.command("reload", short_help="Reload agents")
def remote_agent_reload():
pioplus_call(sys.argv[1:])


@remote_agent.command("list", short_help="List active agents")
def remote_agent_list():
pioplus_call(sys.argv[1:])


@cli.command(
"update", short_help="Update installed Platforms, Packages and Libraries")
@click.option(
"-c",
"--only-check",
is_flag=True,
help="Do not update, only check for new version")
def remote_update(only_check):
pioplus_call(sys.argv[1:])


@cli.command("run", short_help="Process project environments remotely")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(
exists=True,
file_okay=True,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("--disable-auto-clean", is_flag=True)
@click.option("-r", "--force-remote", is_flag=True)
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
def remote_run(**kwargs):
pioplus_call(sys.argv[1:])


@cli.command("test", short_help="Remote Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option("--ignore", "-i", multiple=True, metavar="<pattern>")
@click.option("--upload-port")
@click.option("--test-port")
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(
exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-r", "--force-remote", is_flag=True)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--verbose", "-v", is_flag=True)
def remote_test(**kwargs):
pioplus_call(sys.argv[1:])


@cli.group("device", short_help="Monitor remote device or list existing")
def remote_device():
pass


@remote_device.command("list", short_help="List remote devices")
@click.option("--json-output", is_flag=True)
def device_list(json_output):
pioplus_call(sys.argv[1:])


@remote_device.command("monitor", short_help="Monitor remote device")
@click.option("--port", "-p", help="Port, a number or a device name")
@click.option(
"--baud", "-b", type=int, default=9600, help="Set baud rate, default=9600")
@click.option(
"--parity",
default="N",
type=click.Choice(["N", "E", "O", "S", "M"]),
help="Set parity, default=N")
@click.option(
"--rtscts", is_flag=True, help="Enable RTS/CTS flow control, default=Off")
@click.option(
"--xonxoff",
is_flag=True,
help="Enable software flow control, default=Off")
@click.option(
"--rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state")
@click.option(
"--dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state")
@click.option("--echo", is_flag=True, help="Enable local echo, default=Off")
@click.option(
"--encoding",
default="UTF-8",
help="Set the encoding for the serial port (e.g. hexlify, "
"Latin1, UTF-8), default: UTF-8")
@click.option("--filter", "-f", multiple=True, help="Add text transformation")
@click.option(
"--eol",
default="CRLF",
type=click.Choice(["CR", "LF", "CRLF"]),
help="End of line mode, default=CRLF")
@click.option(
"--raw", is_flag=True, help="Do not apply any encodings/transformations")
@click.option(
"--exit-char",
type=int,
default=3,
help="ASCII code of special character that is used to exit "
"the application, default=3 (Ctrl+C)")
@click.option(
"--menu-char",
type=int,
default=20,
help="ASCII code of special character that is used to "
"control miniterm (menu), default=20 (DEC)")
@click.option(
"--quiet",
is_flag=True,
help="Diagnostics: suppress non-error messages, default=Off")
@click.pass_context
def device_monitor(ctx, **kwargs):

def _tx_target(sock_dir):
try:
pioplus_call(sys.argv[1:] + ["--sock", sock_dir])
except exception.ReturnErrorCode:
pass

sock_dir = mkdtemp(suffix="pioplus")
sock_file = join(sock_dir, "sock")
try:
t = threading.Thread(target=_tx_target, args=(sock_dir, ))
t.start()
while t.is_alive() and not isfile(sock_file):
sleep(0.1)
if not t.is_alive():
return
kwargs['port'] = open(sock_file).read()
ctx.invoke(cmd_device_monitor, **kwargs)
t.join(2)
finally:
util.rmtree_(sock_dir)
@@ -1,435 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from hashlib import sha1
from os import getcwd, makedirs, walk
from os.path import getmtime, isdir, isfile, join
from time import time

import click

from platformio import __version__, exception, telemetry, util
from platformio.commands.device import device_monitor as cmd_device_monitor
from platformio.commands.lib import lib_install as cmd_lib_install
from platformio.commands.platform import \
platform_install as cmd_platform_install
from platformio.managers.lib import LibraryManager, is_builtin_lib
from platformio.managers.platform import PlatformFactory

# pylint: disable=too-many-arguments,too-many-locals,too-many-branches


@click.command("run", short_help="Process project environments")
@click.option("-e", "--environment", multiple=True)
@click.option("-t", "--target", multiple=True)
@click.option("--upload-port")
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(
exists=True,
file_okay=True,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("-s", "--silent", is_flag=True)
@click.option("-v", "--verbose", is_flag=True)
@click.option("--disable-auto-clean", is_flag=True)
@click.pass_context
def cli(ctx, environment, target, upload_port, project_dir, silent, verbose,
disable_auto_clean):
# find project directory on upper level
if isfile(project_dir):
project_dir = util.find_project_dir_above(project_dir)

if not util.is_platformio_project(project_dir):
raise exception.NotPlatformIOProject(project_dir)

with util.cd(project_dir):
# clean obsolete build dir
if not disable_auto_clean:
try:
_clean_build_dir(util.get_projectbuild_dir())
except: # pylint: disable=bare-except
click.secho(
"Can not remove temporary directory `%s`. Please remove "
"it manually to avoid build issues" %
util.get_projectbuild_dir(force=True),
fg="yellow")

config = util.load_project_config()
env_default = None
if config.has_option("platformio", "env_default"):
env_default = util.parse_conf_multi_values(
config.get("platformio", "env_default"))

check_project_defopts(config)
check_project_envs(config, environment or env_default)

results = []
start_time = time()
for section in config.sections():
if not section.startswith("env:"):
continue

envname = section[4:]
skipenv = any([
environment and envname not in environment, not environment
and env_default and envname not in env_default
])
if skipenv:
results.append((envname, None))
continue

if not silent and results:
click.echo()

options = {}
for k, v in config.items(section):
options[k] = v
if "piotest" not in options and "piotest" in ctx.meta:
options['piotest'] = ctx.meta['piotest']

ep = EnvironmentProcessor(ctx, envname, options, target,
upload_port, silent, verbose)
result = (envname, ep.process())
results.append(result)
if result[1] and "monitor" in ep.get_build_targets() and \
"nobuild" not in ep.get_build_targets():
ctx.invoke(
cmd_device_monitor,
environment=environment[0] if environment else None)

found_error = any(status is False for (_, status) in results)

if (found_error or not silent) and len(results) > 1:
click.echo()
print_summary(results, start_time)

if found_error:
raise exception.ReturnErrorCode(1)
return True


class EnvironmentProcessor(object):

DEFAULT_DUMP_OPTIONS = ("platform", "framework", "board")

KNOWN_PLATFORMIO_OPTIONS = [
"description", "env_default", "home_dir", "lib_dir", "libdeps_dir",
"include_dir", "src_dir", "build_dir", "data_dir", "test_dir",
"boards_dir", "lib_extra_dirs"
]

KNOWN_ENV_OPTIONS = [
"platform", "framework", "board", "build_flags", "src_build_flags",
"build_unflags", "src_filter", "extra_scripts", "targets",
"upload_port", "upload_protocol", "upload_speed", "upload_flags",
"upload_resetmethod", "lib_deps", "lib_ignore", "lib_extra_dirs",
"lib_ldf_mode", "lib_compat_mode", "lib_archive", "piotest",
"test_transport", "test_filter", "test_ignore", "test_port",
"test_speed", "test_build_project_src", "debug_tool", "debug_port",
"debug_init_cmds", "debug_extra_cmds", "debug_server",
"debug_init_break", "debug_load_cmd", "debug_load_mode",
"debug_svd_path", "monitor_port", "monitor_speed", "monitor_rts",
"monitor_dtr"
]

IGNORE_BUILD_OPTIONS = [
"test_transport", "test_filter", "test_ignore", "test_port",
"test_speed", "debug_port", "debug_init_cmds", "debug_extra_cmds",
"debug_server", "debug_init_break", "debug_load_cmd",
"debug_load_mode", "monitor_port", "monitor_speed", "monitor_rts",
"monitor_dtr"
]

REMAPED_OPTIONS = {"framework": "pioframework", "platform": "pioplatform"}

RENAMED_OPTIONS = {
"lib_use": "lib_deps",
"lib_force": "lib_deps",
"extra_script": "extra_scripts",
"monitor_baud": "monitor_speed",
"board_mcu": "board_build.mcu",
"board_f_cpu": "board_build.f_cpu",
"board_f_flash": "board_build.f_flash",
"board_flash_mode": "board_build.flash_mode"
}

RENAMED_PLATFORMS = {"espressif": "espressif8266"}

def __init__(
self, # pylint: disable=R0913
cmd_ctx,
name,
options,
targets,
upload_port,
silent,
verbose):
self.cmd_ctx = cmd_ctx
self.name = name
self.options = options
self.targets = targets
self.upload_port = upload_port
self.silent = silent
self.verbose = verbose

def process(self):
terminal_width, _ = click.get_terminal_size()
start_time = time()
env_dump = []

for k, v in self.options.items():
self.options[k] = self.options[k].strip()
if self.verbose or k in self.DEFAULT_DUMP_OPTIONS:
env_dump.append(
"%s: %s" % (k, ", ".join(util.parse_conf_multi_values(v))))

if not self.silent:
click.echo("Processing %s (%s)" % (click.style(
self.name, fg="cyan", bold=True), "; ".join(env_dump)))
click.secho("-" * terminal_width, bold=True)

self.options = self._validate_options(self.options)
result = self._run()
is_error = result['returncode'] != 0

if self.silent and not is_error:
return True

if is_error or "piotest_processor" not in self.cmd_ctx.meta:
print_header(
"[%s] Took %.2f seconds" % (
(click.style("ERROR", fg="red", bold=True) if is_error else
click.style("SUCCESS", fg="green", bold=True)),
time() - start_time),
is_error=is_error)

return not is_error

def _validate_options(self, options):
result = {}
for k, v in options.items():
# process obsolete options
if k in self.RENAMED_OPTIONS:
click.secho(
"Warning! `%s` option is deprecated and will be "
"removed in the next release! Please use "
"`%s` instead." % (k, self.RENAMED_OPTIONS[k]),
fg="yellow")
k = self.RENAMED_OPTIONS[k]
# process renamed platforms
if k == "platform" and v in self.RENAMED_PLATFORMS:
click.secho(
"Warning! Platform `%s` is deprecated and will be "
"removed in the next release! Please use "
"`%s` instead." % (v, self.RENAMED_PLATFORMS[v]),
fg="yellow")
v = self.RENAMED_PLATFORMS[v]

# warn about unknown options
unknown_conditions = [
k not in self.KNOWN_ENV_OPTIONS, not k.startswith("custom_"),
not k.startswith("board_")
]
if all(unknown_conditions):
click.secho(
"Detected non-PlatformIO `%s` option in `[env:%s]` section"
% (k, self.name),
fg="yellow")
result[k] = v
return result

def get_build_variables(self):
variables = {"pioenv": self.name}
if self.upload_port:
variables['upload_port'] = self.upload_port
for k, v in self.options.items():
if k in self.REMAPED_OPTIONS:
k = self.REMAPED_OPTIONS[k]
if k in self.IGNORE_BUILD_OPTIONS:
continue
if k == "targets" or (k == "upload_port" and self.upload_port):
continue
variables[k] = v
return variables

def get_build_targets(self):
targets = []
if self.targets:
targets = [t for t in self.targets]
elif "targets" in self.options:
targets = self.options['targets'].split(", ")
return targets

def _run(self):
if "platform" not in self.options:
raise exception.UndefinedEnvPlatform(self.name)

build_vars = self.get_build_variables()
build_targets = self.get_build_targets()

telemetry.on_run_environment(self.options, build_targets)

# skip monitor target, we call it above
if "monitor" in build_targets:
build_targets.remove("monitor")
if "nobuild" not in build_targets:
# install dependent libraries
if "lib_install" in self.options:
_autoinstall_libdeps(self.cmd_ctx, [
int(d.strip())
for d in self.options['lib_install'].split(",")
if d.strip()
], self.verbose)
if "lib_deps" in self.options:
_autoinstall_libdeps(
self.cmd_ctx,
util.parse_conf_multi_values(self.options['lib_deps']),
self.verbose)

try:
p = PlatformFactory.newPlatform(self.options['platform'])
except exception.UnknownPlatform:
self.cmd_ctx.invoke(
cmd_platform_install,
platforms=[self.options['platform']],
skip_default_package=True)
p = PlatformFactory.newPlatform(self.options['platform'])

return p.run(build_vars, build_targets, self.silent, self.verbose)


def _autoinstall_libdeps(ctx, libraries, verbose=False):
if not libraries:
return
storage_dir = util.get_projectlibdeps_dir()
ctx.obj = LibraryManager(storage_dir)
if verbose:
click.echo("Library Storage: " + storage_dir)
for lib in libraries:
try:
ctx.invoke(cmd_lib_install, libraries=[lib], silent=not verbose)
except exception.LibNotFound as e:
if verbose or not is_builtin_lib(lib):
click.secho("Warning! %s" % e, fg="yellow")
except exception.InternetIsOffline as e:
click.secho(str(e), fg="yellow")


def _clean_build_dir(build_dir):
structhash_file = join(build_dir, "structure.hash")
proj_hash = calculate_project_hash()

# if project's config is modified
if (isdir(build_dir)
and getmtime(join(util.get_project_dir(),
"platformio.ini")) > getmtime(build_dir)):
util.rmtree_(build_dir)

# check project structure
if isdir(build_dir) and isfile(structhash_file):
with open(structhash_file) as f:
if f.read() == proj_hash:
return
util.rmtree_(build_dir)

if not isdir(build_dir):
makedirs(build_dir)

with open(structhash_file, "w") as f:
f.write(proj_hash)


def print_header(label, is_error=False):
terminal_width, _ = click.get_terminal_size()
width = len(click.unstyle(label))
half_line = "=" * ((terminal_width - width - 2) / 2)
click.echo("%s %s %s" % (half_line, label, half_line), err=is_error)


def print_summary(results, start_time):
print_header("[%s]" % click.style("SUMMARY"))

envname_max_len = 0
for (envname, _) in results:
if len(envname) > envname_max_len:
envname_max_len = len(envname)

successed = True
for (envname, status) in results:
status_str = click.style("SUCCESS", fg="green")
if status is False:
successed = False
status_str = click.style("ERROR", fg="red")
elif status is None:
status_str = click.style("SKIP", fg="yellow")

format_str = (
"Environment {0:<" + str(envname_max_len + 9) + "}\t[{1}]")
click.echo(
format_str.format(click.style(envname, fg="cyan"), status_str),
err=status is False)

print_header(
"[%s] Took %.2f seconds" % (
(click.style("SUCCESS", fg="green", bold=True) if successed else
click.style("ERROR", fg="red", bold=True)), time() - start_time),
is_error=not successed)


def check_project_defopts(config):
if not config.has_section("platformio"):
return True
unknown = set([k for k, _ in config.items("platformio")]) - set(
EnvironmentProcessor.KNOWN_PLATFORMIO_OPTIONS)
if not unknown:
return True
click.secho(
"Warning! Ignore unknown `%s` option in `[platformio]` section" %
", ".join(unknown),
fg="yellow")
return False


def check_project_envs(config, environments=None):
if not config.sections():
raise exception.ProjectEnvsNotAvailable()

known = set([s[4:] for s in config.sections() if s.startswith("env:")])
unknown = set(environments or []) - known
if unknown:
raise exception.UnknownEnvNames(", ".join(unknown), ", ".join(known))
return True


def calculate_project_hash():
check_suffixes = (".c", ".cc", ".cpp", ".h", ".hpp", ".s", ".S")
chunks = [__version__]
for d in (util.get_projectsrc_dir(), util.get_projectlib_dir()):
if not isdir(d):
continue
for root, _, files in walk(d):
for f in files:
path = join(root, f)
if path.endswith(check_suffixes):
chunks.append(path)
chunks_to_str = ",".join(sorted(chunks))
if "windows" in util.get_systype():
# Fix issue with useless project rebuilding for case insensitive FS.
# A case of disk drive can differ...
chunks_to_str = chunks_to_str.lower()
return sha1(chunks_to_str).hexdigest()
@@ -13,11 +13,21 @@
# limitations under the License.

import click
from tabulate import tabulate

from platformio import app
from platformio.compat import string_types


@click.group(short_help="Manage PlatformIO settings")
def format_value(raw):
if isinstance(raw, bool):
return "Yes" if raw else "No"
if isinstance(raw, string_types):
return raw
return str(raw)


@click.group(short_help="Manage system settings")
def cli():
pass

@@ -25,41 +35,31 @@ def cli():
@cli.command("get", short_help="Get existing setting/-s")
@click.argument("name", required=False)
def settings_get(name):
tabular_data = []
for key, options in sorted(app.DEFAULT_SETTINGS.items()):
if name and name != key:
continue
raw_value = app.get_setting(key)
formatted_value = format_value(raw_value)

list_tpl = "{name:<40} {value:<35} {description}"
terminal_width, _ = click.get_terminal_size()
if raw_value != options["value"]:
default_formatted_value = format_value(options["value"])
formatted_value += "%s" % (
"\n" if len(default_formatted_value) > 10 else " "
)
formatted_value += "[%s]" % click.style(
default_formatted_value, fg="yellow"
)

tabular_data.append(
(click.style(key, fg="cyan"), formatted_value, options["description"])
)

click.echo(
list_tpl.format(
name=click.style("Name", fg="cyan"),
value=(click.style("Value", fg="green") + click.style(
" [Default]", fg="yellow")),
description="Description"))
click.echo("-" * terminal_width)

for _name, _data in sorted(app.DEFAULT_SETTINGS.items()):
if name and name != _name:
continue
_value = app.get_setting(_name)

_value_str = str(_value)
if isinstance(_value, bool):
_value_str = "Yes" if _value else "No"
_value_str = click.style(_value_str, fg="green")

if _value != _data['value']:
_defvalue_str = str(_data['value'])
if isinstance(_data['value'], bool):
_defvalue_str = "Yes" if _data['value'] else "No"
_value_str += click.style(" [%s]" % _defvalue_str, fg="yellow")
else:
_value_str += click.style(" ", fg="yellow")

click.echo(
list_tpl.format(
name=click.style(_name, fg="cyan"),
value=_value_str,
description=_data['description']))
tabulate(
tabular_data, headers=["Name", "Current value [Default]", "Description"]
)
)


@cli.command("set", short_help="Set new value for the setting")
@@ -76,5 +76,5 @@ def settings_set(ctx, name, value):
@click.pass_context
def settings_reset(ctx):
app.reset_settings()
click.secho("The settings have been reseted!", fg="green")
click.secho("The settings have been reset!", fg="green")
ctx.invoke(settings_get)
@@ -1,67 +0,0 @@
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
from os import getcwd

import click

from platformio.managers.core import pioplus_call


@click.command("test", short_help="Local Unit Testing")
@click.option("--environment", "-e", multiple=True, metavar="<environment>")
@click.option(
"--filter",
"-f",
multiple=True,
metavar="<pattern>",
help="Filter tests by a pattern")
@click.option(
"--ignore",
"-i",
multiple=True,
metavar="<pattern>",
help="Ignore tests by a pattern")
@click.option("--upload-port")
@click.option("--test-port")
@click.option(
"-d",
"--project-dir",
default=getcwd,
type=click.Path(
exists=True,
file_okay=False,
dir_okay=True,
writable=True,
resolve_path=True))
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option(
"--no-reset",
is_flag=True,
help="Disable software reset via Serial.DTR/RST")
@click.option(
"--monitor-rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state for Serial Monitor")
@click.option(
"--monitor-dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state for Serial Monitor")
@click.option("--verbose", "-v", is_flag=True)
def cli(*args, **kwargs): # pylint: disable=unused-argument
pioplus_call(sys.argv[1:])
Some files were not shown because too many files have changed in this diff.