forked from espressif/esp-idf
Compare commits
638 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
11eaf41b37 | ||
|
|
c54555bb01 | ||
|
|
2710c37d51 | ||
|
|
9156cb085f | ||
|
|
0c5d2c261d | ||
|
|
15341d51c2 | ||
|
|
86d6781f68 | ||
|
|
f4e96ada37 | ||
|
|
5f74ed7419 | ||
|
|
35e025f8e3 | ||
|
|
9699f27723 | ||
|
|
5408580c41 | ||
|
|
3f75f037c0 | ||
|
|
df1bf06665 | ||
|
|
3a429b0310 | ||
|
|
b8bb11402a | ||
|
|
43c854c278 | ||
|
|
1eb6d90278 | ||
|
|
e1dfdf26bc | ||
|
|
5a64cdecce | ||
|
|
69aeebb986 | ||
|
|
9b555497ca | ||
|
|
b1dee3e510 | ||
|
|
676917955e | ||
|
|
977a90b245 | ||
|
|
af966596d4 | ||
|
|
0b6c7e6aad | ||
|
|
40c03f95f8 | ||
|
|
ffd46a32f7 | ||
|
|
d1a363a7f8 | ||
|
|
aa2451eb88 | ||
|
|
f759073049 | ||
|
|
7c05e2f291 | ||
|
|
09475c744f | ||
|
|
e8a5fdcff3 | ||
|
|
ffe6458c30 | ||
|
|
be39aabe00 | ||
|
|
eae8973711 | ||
|
|
62a4b70b44 | ||
|
|
581fa1b688 | ||
|
|
f317bb15fd | ||
|
|
49fd283364 | ||
|
|
745a1f492e | ||
|
|
5a234cf642 | ||
|
|
07f9c7664d | ||
|
|
2d66984b09 | ||
|
|
dea2d77712 | ||
|
|
ea45c22a5c | ||
|
|
016b63dacf | ||
|
|
53921e8104 | ||
|
|
c9ead16682 | ||
|
|
39dd85639a | ||
|
|
a45478abc6 | ||
|
|
a69e92864f | ||
|
|
b5785b41eb | ||
|
|
53be71f8f2 | ||
|
|
95a5d3ff36 | ||
|
|
912f6c527c | ||
|
|
baf258e3e6 | ||
|
|
951e3b2b8c | ||
|
|
e9c1d5c4b6 | ||
|
|
48226735d1 | ||
|
|
f4c6faa943 | ||
|
|
bd18b8ba6a | ||
|
|
086294e3b3 | ||
|
|
b85b15a3c4 | ||
|
|
1ff54dde9a | ||
|
|
8477ec19f5 | ||
|
|
fd9a55a775 | ||
|
|
239a6da000 | ||
|
|
9694fe3c2a | ||
|
|
36a7c4ca66 | ||
|
|
0e03d101e2 | ||
|
|
50cb3a4435 | ||
|
|
06086d3a36 | ||
|
|
1fdb0e1871 | ||
|
|
61379f09cd | ||
|
|
9b50d7d60f | ||
|
|
796065f6fb | ||
|
|
e86e0f3dc8 | ||
|
|
915cb63bb1 | ||
|
|
33756fd210 | ||
|
|
194343e056 | ||
|
|
c4d3d74e39 | ||
|
|
fb0c9550d8 | ||
|
|
f18d887a6d | ||
|
|
c30d9829d3 | ||
|
|
6d238cdba2 | ||
|
|
5c24c70ed8 | ||
|
|
fcb9cf8b93 | ||
|
|
83b6c79f93 | ||
|
|
14d88e5e5e | ||
|
|
7823f6c996 | ||
|
|
bdce46f1e3 | ||
|
|
8e4fa20695 | ||
|
|
bb45925dff | ||
|
|
962b105be5 | ||
|
|
ce6b508c4f | ||
|
|
f30d282d66 | ||
|
|
daed0d9f3a | ||
|
|
09db8a96f0 | ||
|
|
fe277d5b13 | ||
|
|
659ae0ce9e | ||
|
|
77430f3e2d | ||
|
|
125a784983 | ||
|
|
21f33f6e2a | ||
|
|
a80717fbd5 | ||
|
|
690d1a032e | ||
|
|
bf0a5fe7f4 | ||
|
|
e79f46c044 | ||
|
|
23e7663119 | ||
|
|
17f2bceeb7 | ||
|
|
000c70b3a1 | ||
|
|
17654970e9 | ||
|
|
c3b7aa76d9 | ||
|
|
5b110971ac | ||
|
|
0d8ca93e8d | ||
|
|
c70dde1420 | ||
|
|
90e21c4404 | ||
|
|
9d2d1e2d39 | ||
|
|
99d10ca3d2 | ||
|
|
03211c7024 | ||
|
|
3139ae0f0e | ||
|
|
0b9b16cb77 | ||
|
|
40f7adaf3b | ||
|
|
0af120ae17 | ||
|
|
4efa9ca1db | ||
|
|
ede660ce4b | ||
|
|
1fb7a2ca58 | ||
|
|
c8b5789267 | ||
|
|
5592d28f9e | ||
|
|
d5076b5749 | ||
|
|
f183419d94 | ||
|
|
4a6a10044d | ||
|
|
1ff3eaafe9 | ||
|
|
7b32ba0763 | ||
|
|
82d53440a4 | ||
|
|
f75b4b1636 | ||
|
|
ae21032e9d | ||
|
|
4e09c6e346 | ||
|
|
0056ab449e | ||
|
|
c6bf363f14 | ||
|
|
5515c890ed | ||
|
|
d1efa0d869 | ||
|
|
6a216ca839 | ||
|
|
b893744fd1 | ||
|
|
f29d324691 | ||
|
|
dd83f65e5c | ||
|
|
6b8740ae8d | ||
|
|
01808d0cfb | ||
|
|
9ae095fc20 | ||
|
|
d1f8299726 | ||
|
|
0375c9bca0 | ||
|
|
692d15abbe | ||
|
|
b4a612345b | ||
|
|
fb98bb40f3 | ||
|
|
cff4d95568 | ||
|
|
a58af467fc | ||
|
|
15f1425f67 | ||
|
|
0d60862933 | ||
|
|
c50a84f218 | ||
|
|
5c729436d6 | ||
|
|
96a3926e22 | ||
|
|
c4444d1385 | ||
|
|
504c6a09e1 | ||
|
|
2b475022c5 | ||
|
|
13c548cefa | ||
|
|
1427e14b72 | ||
|
|
424bf120a3 | ||
|
|
90c51d25e0 | ||
|
|
81bc017ff6 | ||
|
|
7b5799830c | ||
|
|
579bda6b49 | ||
|
|
ae2120cdcd | ||
|
|
e49823f10c | ||
|
|
e3a854769a | ||
|
|
3d42ac21fa | ||
|
|
ff8a6a5bf9 | ||
|
|
cf4c2ea576 | ||
|
|
608d575172 | ||
|
|
0da6bb8825 | ||
|
|
bff4001473 | ||
|
|
8b68b39198 | ||
|
|
9edbb98a2c | ||
|
|
786c23bcfa | ||
|
|
41ca90905e | ||
|
|
4435437501 | ||
|
|
9effe1aba6 | ||
|
|
8dcf3e5f80 | ||
|
|
8ce0b8bf73 | ||
|
|
ceb9e42058 | ||
|
|
6947dd533a | ||
|
|
e6a4ad3d55 | ||
|
|
21b06f43bf | ||
|
|
911ad39808 | ||
|
|
651f60e663 | ||
|
|
129bc8b7ff | ||
|
|
6a34106488 | ||
|
|
ff14e382a4 | ||
|
|
499625be33 | ||
|
|
ebcb490aa9 | ||
|
|
613c17bc2f | ||
|
|
a2b96227ac | ||
|
|
1ef5c02c37 | ||
|
|
aefe0722a0 | ||
|
|
96ba61f89f | ||
|
|
985cc9318a | ||
|
|
8f79c54242 | ||
|
|
6949092234 | ||
|
|
ad25a90d6a | ||
|
|
d5dc04693d | ||
|
|
aca0d3ad17 | ||
|
|
a36d8bc742 | ||
|
|
3fdbfb2069 | ||
|
|
87cc516338 | ||
|
|
c26660e504 | ||
|
|
da169340f9 | ||
|
|
f727b7b25d | ||
|
|
0fe3ec63a0 | ||
|
|
7778ab2759 | ||
|
|
fdf04b3198 | ||
|
|
debcb50fd2 | ||
|
|
7b3c08e37a | ||
|
|
db15c0e183 | ||
|
|
a5dc34f416 | ||
|
|
d9d388dae7 | ||
|
|
97594d2076 | ||
|
|
0a1e5846c7 | ||
|
|
a6cbcb91d2 | ||
|
|
3c8cccc73b | ||
|
|
a7e2ea76d8 | ||
|
|
8005821b09 | ||
|
|
1f2d548fbb | ||
|
|
841339c012 | ||
|
|
1fb97c1718 | ||
|
|
39a383981d | ||
|
|
61bd19b446 | ||
|
|
4e5757f1ab | ||
|
|
0d4d3c103f | ||
|
|
f8736aed36 | ||
|
|
37bf8dff6b | ||
|
|
90d69b38b2 | ||
|
|
f32321e6a5 | ||
|
|
968b15d380 | ||
|
|
e951cebfa2 | ||
|
|
fdd8d4284f | ||
|
|
191466d824 | ||
|
|
56563f7092 | ||
|
|
572a66b62e | ||
|
|
a4bfa19ebd | ||
|
|
a3cee541d3 | ||
|
|
bdb0756cdb | ||
|
|
6595897d2d | ||
|
|
66759438a9 | ||
|
|
56a6cad52c | ||
|
|
be27966ce9 | ||
|
|
e2b18f2c2c | ||
|
|
e19be79e4f | ||
|
|
a5b9169a7e | ||
|
|
2d8fb1604f | ||
|
|
d07bd19ba6 | ||
|
|
6139b362a0 | ||
|
|
6a08a13e70 | ||
|
|
70c7f3725f | ||
|
|
b586575970 | ||
|
|
7c3a604e27 | ||
|
|
bc2ef597a4 | ||
|
|
4cf3acb594 | ||
|
|
af4991fb39 | ||
|
|
e9c617fa19 | ||
|
|
a168fde297 | ||
|
|
8558aa4414 | ||
|
|
72176eee5d | ||
|
|
3da789bae6 | ||
|
|
34795220d2 | ||
|
|
357e0e144b | ||
|
|
58f1bde9ef | ||
|
|
f9c8db8f94 | ||
|
|
6a92a3582e | ||
|
|
609d25482b | ||
|
|
36cbbdcf65 | ||
|
|
a75cf3effd | ||
|
|
b85e6d3dd8 | ||
|
|
555bd367e1 | ||
|
|
c2e134b775 | ||
|
|
7cdddd47aa | ||
|
|
ffd3a58ff9 | ||
|
|
bf816394d4 | ||
|
|
1625943486 | ||
|
|
52eab870e5 | ||
|
|
ffbbf0cebf | ||
|
|
a635d11b4a | ||
|
|
df43d670a1 | ||
|
|
7a3efab6a7 | ||
|
|
6388f3f13c | ||
|
|
eb848eaa6b | ||
|
|
c7b527bd27 | ||
|
|
aba5fdcdcd | ||
|
|
fc95a892ab | ||
|
|
4c1c9373e9 | ||
|
|
3081c8b5ea | ||
|
|
afbbaaf417 | ||
|
|
913550f62c | ||
|
|
ee0ee4887f | ||
|
|
63fee6c23a | ||
|
|
633dd89d4a | ||
|
|
c9f1d3e8be | ||
|
|
e3653aaa98 | ||
|
|
0818b1fca1 | ||
|
|
44f266693a | ||
|
|
35844b3d09 | ||
|
|
ca3bcb18b0 | ||
|
|
8c86ccc2c6 | ||
|
|
7ee2470603 | ||
|
|
821d82f04e | ||
|
|
4e0459f112 | ||
|
|
3c43fb0707 | ||
|
|
fb4e56e9a3 | ||
|
|
60e439db60 | ||
|
|
b94656115e | ||
|
|
068a364a6b | ||
|
|
134fd6b8d8 | ||
|
|
d3a78fef1b | ||
|
|
3ca40da386 | ||
|
|
b05cfb4eb1 | ||
|
|
2110f6b037 | ||
|
|
1ef33e12a4 | ||
|
|
720985250b | ||
|
|
dc835a0918 | ||
|
|
264284e0de | ||
|
|
2482c4a025 | ||
|
|
0d0265f6f3 | ||
|
|
eba1baa63d | ||
|
|
4f33ef4e11 | ||
|
|
0d55c89950 | ||
|
|
ba7b323c3e | ||
|
|
e58ed21fbf | ||
|
|
026fb6e292 | ||
|
|
587b4b32f8 | ||
|
|
d28751ee7f | ||
|
|
7a335421e1 | ||
|
|
9f65fa31e2 | ||
|
|
dd295049a9 | ||
|
|
b807b6ebf3 | ||
|
|
e4ecfc2133 | ||
|
|
99a923fa0f | ||
|
|
40d48108b1 | ||
|
|
55ed548cc6 | ||
|
|
3d2daa08cc | ||
|
|
804f939ed0 | ||
|
|
d465ed93f6 | ||
|
|
c90d14c979 | ||
|
|
afab071f51 | ||
|
|
d680a3949b | ||
|
|
faf6bc933e | ||
|
|
6fcfe379bc | ||
|
|
e089cb3d4d | ||
|
|
2555d5cb12 | ||
|
|
c1779ff8b7 | ||
|
|
1437d00487 | ||
|
|
1a9f3b22f4 | ||
|
|
567bc462f0 | ||
|
|
a9b1a27c9a | ||
|
|
09a3d068d9 | ||
|
|
70e83a5871 | ||
|
|
e849809f4b | ||
|
|
bc38841c11 | ||
|
|
f40318c6c7 | ||
|
|
4122499997 | ||
|
|
ef5acffcbb | ||
|
|
1f79b5045d | ||
|
|
74c0234f72 | ||
|
|
330f3edf11 | ||
|
|
63ea519cbe | ||
|
|
c71ee4f78a | ||
|
|
5c74467a5d | ||
|
|
8150abfb67 | ||
|
|
4bedb8372b | ||
|
|
170055603e | ||
|
|
634ce1e425 | ||
|
|
b74bdaceee | ||
|
|
b780287e3b | ||
|
|
8a64c4c404 | ||
|
|
c6c42d0b5c | ||
|
|
9ead485ffd | ||
|
|
6abe369115 | ||
|
|
be96274ea3 | ||
|
|
f28c47e4af | ||
|
|
a9f7ea3566 | ||
|
|
f0e1a1f35f | ||
|
|
c77b2fcd5d | ||
|
|
8cc8a1108d | ||
|
|
541b665b9f | ||
|
|
ee65ea9fb7 | ||
|
|
7add372f37 | ||
|
|
263d186a5f | ||
|
|
b7ac980fbc | ||
|
|
80f3916f0f | ||
|
|
7faa087670 | ||
|
|
d0dab67955 | ||
|
|
004e93764f | ||
|
|
f41d1b09f8 | ||
|
|
83d9e81789 | ||
|
|
e202aa3b9f | ||
|
|
c23973dfa7 | ||
|
|
8495745471 | ||
|
|
323f128228 | ||
|
|
6ffc6a40a7 | ||
|
|
c7a270f83d | ||
|
|
93b2c4640b | ||
|
|
ecd894a4bd | ||
|
|
45d801d815 | ||
|
|
15b27b1749 | ||
|
|
26aa680b5b | ||
|
|
b7b042f45d | ||
|
|
8f44bee739 | ||
|
|
e785f453f4 | ||
|
|
533b82aae1 | ||
|
|
17c72d85e4 | ||
|
|
564e5c9429 | ||
|
|
fb6ba3f337 | ||
|
|
f04854bb4a | ||
|
|
d7ae1665f1 | ||
|
|
ba8afdbf81 | ||
|
|
feda32be06 | ||
|
|
df7ba090f3 | ||
|
|
c73141a9c2 | ||
|
|
98261d38cc | ||
|
|
b1bba5fdd3 | ||
|
|
bb29c6e55d | ||
|
|
aaf398617c | ||
|
|
782eaa3c7e | ||
|
|
d4b4fe85af | ||
|
|
40093b34eb | ||
|
|
78a3ecb7ff | ||
|
|
135987b339 | ||
|
|
e67e9cca57 | ||
|
|
ea21b8b700 | ||
|
|
600c7c8828 | ||
|
|
8abcc07d1f | ||
|
|
bad8adfd59 | ||
|
|
f207ce15df | ||
|
|
2882b6f68b | ||
|
|
f434d21f4a | ||
|
|
d3b4acf7a0 | ||
|
|
35013d90a3 | ||
|
|
57bbfd423a | ||
|
|
3d591c57c0 | ||
|
|
9d694e40ed | ||
|
|
1cdca25776 | ||
|
|
0e61201243 | ||
|
|
7f0f299e66 | ||
|
|
fc11d2ae72 | ||
|
|
7f8fe9569c | ||
|
|
bf8a6ef490 | ||
|
|
f8ee9b334c | ||
|
|
47ddba60d7 | ||
|
|
2fa1e2b23a | ||
|
|
f0869bb354 | ||
|
|
89c3bebad4 | ||
|
|
600986cf49 | ||
|
|
4379d26f65 | ||
|
|
30ecc4ce72 | ||
|
|
9bf48e77f0 | ||
|
|
b7f1aa5292 | ||
|
|
d65f8a5fc6 | ||
|
|
34d964bf38 | ||
|
|
48b4693eae | ||
|
|
9ecfa6af81 | ||
|
|
43f2476aac | ||
|
|
476f83c602 | ||
|
|
ab93a6bd5b | ||
|
|
79dd7a350e | ||
|
|
adc8351458 | ||
|
|
681439b85a | ||
|
|
8baaeb2fa3 | ||
|
|
eb51374615 | ||
|
|
00484dc8a7 | ||
|
|
539c4d8cd0 | ||
|
|
f5b7b148f1 | ||
|
|
b62d63b767 | ||
|
|
de36cb7904 | ||
|
|
07245bf43a | ||
|
|
2b6feac67c | ||
|
|
6cabcc8206 | ||
|
|
6bf448ddc8 | ||
|
|
868d52dcd1 | ||
|
|
22c5a4befc | ||
|
|
d8b2b9e77f | ||
|
|
7b27e4e66a | ||
|
|
4175c60a21 | ||
|
|
592b1660a0 | ||
|
|
5740323822 | ||
|
|
4824325fe4 | ||
|
|
c570105f36 | ||
|
|
841d75b3a3 | ||
|
|
2d07e3a6dc | ||
|
|
d0aa950fa6 | ||
|
|
7bbe4eae46 | ||
|
|
2f694bee0a | ||
|
|
293f5631bb | ||
|
|
7391b59f54 | ||
|
|
e5155c2a54 | ||
|
|
2c0cea641a | ||
|
|
b4f6edecbc | ||
|
|
ab3bb3d414 | ||
|
|
e1b6713f8f | ||
|
|
1efaf83ef4 | ||
|
|
8fe15a26d5 | ||
|
|
11a92e3dbd | ||
|
|
42aaf57419 | ||
|
|
05a4a77b52 | ||
|
|
9583c45947 | ||
|
|
4d0d0f5d10 | ||
|
|
5e65545320 | ||
|
|
0b0d474cab | ||
|
|
78784a60c0 | ||
|
|
3ab9a2fd71 | ||
|
|
6e187ee0af | ||
|
|
94c27b976d | ||
|
|
8da85639a2 | ||
|
|
621acc4d75 | ||
|
|
99f06b7c56 | ||
|
|
fe3d0955af | ||
|
|
c56f226e51 | ||
|
|
9036037a36 | ||
|
|
fbf4b7f705 | ||
|
|
89b27577d6 | ||
|
|
498e41d93e | ||
|
|
340195ba79 | ||
|
|
db5e8805ed | ||
|
|
9b5be39b0f | ||
|
|
9cd791b6ca | ||
|
|
4331ae783a | ||
|
|
e0286e24c8 | ||
|
|
441b0f1ea0 | ||
|
|
be39a0d2aa | ||
|
|
82052a0fc4 | ||
|
|
41be2013fd | ||
|
|
1ae8347bf4 | ||
|
|
0402fdeba1 | ||
|
|
2a968da432 | ||
|
|
a442a6b65c | ||
|
|
1d9a155fd7 | ||
|
|
02cacc6e05 | ||
|
|
f8268a2848 | ||
|
|
2e8cc61af7 | ||
|
|
9d3d9d16f0 | ||
|
|
83aeb7bbb2 | ||
|
|
614aed7409 | ||
|
|
477e19f71c | ||
|
|
7c33c24fe4 | ||
|
|
d9876ffd53 | ||
|
|
f57acc21d6 | ||
|
|
48960337b8 | ||
|
|
28a8e77021 | ||
|
|
66992aca7a | ||
|
|
f01a40afe2 | ||
|
|
03e31dd0ba | ||
|
|
42db3c8660 | ||
|
|
1f9d4c5c5c | ||
|
|
a2b9004203 | ||
|
|
1b07551997 | ||
|
|
02045155ae | ||
|
|
86a62e1de9 | ||
|
|
d709fdfd12 | ||
|
|
90f7b0d321 | ||
|
|
8c7ee8482e | ||
|
|
793de30863 | ||
|
|
ff4c82a32d | ||
|
|
172b7e45b8 | ||
|
|
6e2c0de765 | ||
|
|
7f3221aa09 | ||
|
|
e9df6d89af | ||
|
|
29cfa2d472 | ||
|
|
ce5cd8bf75 | ||
|
|
5a43831006 | ||
|
|
5bf1070143 | ||
|
|
a25673efb2 | ||
|
|
04ead4c281 | ||
|
|
bfa65bfd0d | ||
|
|
1ae6dad0d8 | ||
|
|
cd33e77e0f | ||
|
|
5171308939 | ||
|
|
bcf2ef7ddd | ||
|
|
3c67e1ba01 | ||
|
|
4400846c61 | ||
|
|
3207599b6d | ||
|
|
6077c3f70c | ||
|
|
3d37631967 | ||
|
|
8a12837745 | ||
|
|
b6ad8703ce | ||
|
|
8ab97bb37d | ||
|
|
83f4025a92 | ||
|
|
e3c2ee1ba9 | ||
|
|
85ba4189f8 | ||
|
|
93efb9e351 | ||
|
|
89dcaf4ae5 | ||
|
|
1dd7ece5d3 | ||
|
|
7a701fbc85 | ||
|
|
e8d43b03a1 | ||
|
|
8f8f7ddb45 | ||
|
|
deceda36ac | ||
|
|
33e0f11ef4 | ||
|
|
7c93afb8bd | ||
|
|
1a3a74c26c | ||
|
|
0582b4a25f | ||
|
|
57e59a47c1 | ||
|
|
23c0565a95 | ||
|
|
462d57e18f | ||
|
|
94c38470ac | ||
|
|
b05df37981 | ||
|
|
8b8c3d6204 | ||
|
|
22108bbeb4 | ||
|
|
8dda17e3a3 | ||
|
|
59e504ef27 | ||
|
|
b9ed6f722b | ||
|
|
f03448ca94 | ||
|
|
ba274216b9 | ||
|
|
e758c819d9 | ||
|
|
5fb2277aaa | ||
|
|
3b7e220519 | ||
|
|
e3d72eada7 | ||
|
|
20ca1c0dfa | ||
|
|
caaf3fbe69 | ||
|
|
113c22c1fc | ||
|
|
0de2c5092b | ||
|
|
1ea656f3d9 | ||
|
|
a2e4a3581b | ||
|
|
b68c026adb | ||
|
|
855f95e588 | ||
|
|
1276247696 | ||
|
|
cb1765de81 | ||
|
|
7385079984 | ||
|
|
868737e022 | ||
|
|
80997d5860 | ||
|
|
2b0d48f84d | ||
|
|
dec0bc482c |
@@ -1,4 +0,0 @@
|
||||
[codespell]
|
||||
skip = build,*.yuv,components/fatfs/src/*,alice.txt,*.rgb,components/wpa_supplicant/*,components/esp_wifi/*,*.pem,*/COPYING*,docs/sphinx-known-warnings.txt
|
||||
ignore-words-list = ser,dout,rsource,fram,inout,shs,ans,aci,unstall,unstalling,hart,wheight,wel,ot,fane,assertIn,registr,oen,parms
|
||||
write-changes = true
|
||||
165
.flake8
Normal file
165
.flake8
Normal file
@@ -0,0 +1,165 @@
|
||||
[flake8]
|
||||
|
||||
select =
|
||||
# Full lists are given in order to suppress all errors from other plugins
|
||||
# Full list of pyflakes error codes:
|
||||
F401, # module imported but unused
|
||||
F402, # import module from line N shadowed by loop variable
|
||||
F403, # 'from module import *' used; unable to detect undefined names
|
||||
F404, # future import(s) name after other statements
|
||||
F405, # name may be undefined, or defined from star imports: module
|
||||
F406, # 'from module import *' only allowed at module level
|
||||
F407, # an undefined __future__ feature name was imported
|
||||
F601, # dictionary key name repeated with different values
|
||||
F602, # dictionary key variable name repeated with different values
|
||||
F621, # too many expressions in an assignment with star-unpacking
|
||||
F622, # two or more starred expressions in an assignment (a, *b, *c = d)
|
||||
F631, # assertion test is a tuple, which are always True
|
||||
F701, # a break statement outside of a while or for loop
|
||||
F702, # a continue statement outside of a while or for loop
|
||||
F703, # a continue statement in a finally block in a loop
|
||||
F704, # a yield or yield from statement outside of a function
|
||||
F705, # a return statement with arguments inside a generator
|
||||
F706, # a return statement outside of a function/method
|
||||
F707, # an except: block as not the last exception handler
|
||||
F721, F722, # doctest syntax error syntax error in forward type annotation
|
||||
F811, # redefinition of unused name from line N
|
||||
F812, # list comprehension redefines name from line N
|
||||
F821, # undefined name name
|
||||
F822, # undefined name name in __all__
|
||||
F823, # local variable name referenced before assignment
|
||||
F831, # duplicate argument name in function definition
|
||||
F841, # local variable name is assigned to but never used
|
||||
F901, # raise NotImplemented should be raise NotImplementedError
|
||||
|
||||
# Full list of pycodestyle violations:
|
||||
E101, # indentation contains mixed spaces and tabs
|
||||
E111, # indentation is not a multiple of four
|
||||
E112, # expected an indented block
|
||||
E113, # unexpected indentation
|
||||
E114, # indentation is not a multiple of four (comment)
|
||||
E115, # expected an indented block (comment)
|
||||
E116, # unexpected indentation (comment)
|
||||
E121, # continuation line under-indented for hanging indent
|
||||
E122, # continuation line missing indentation or outdented
|
||||
E123, # closing bracket does not match indentation of opening bracket's line
|
||||
E124, # closing bracket does not match visual indentation
|
||||
E125, # continuation line with same indent as next logical line
|
||||
E126, # continuation line over-indented for hanging indent
|
||||
E127, # continuation line over-indented for visual indent
|
||||
E128, # continuation line under-indented for visual indent
|
||||
E129, # visually indented line with same indent as next logical line
|
||||
E131, # continuation line unaligned for hanging indent
|
||||
E133, # closing bracket is missing indentation
|
||||
E201, # whitespace after '('
|
||||
E202, # whitespace before ')'
|
||||
E203, # whitespace before ':'
|
||||
E211, # whitespace before '('
|
||||
E221, # multiple spaces before operator
|
||||
E222, # multiple spaces after operator
|
||||
E223, # tab before operator
|
||||
E224, # tab after operator
|
||||
E225, # missing whitespace around operator
|
||||
E226, # missing whitespace around arithmetic operator
|
||||
E227, # missing whitespace around bitwise or shift operator
|
||||
E228, # missing whitespace around modulo operator
|
||||
E231, # missing whitespace after ',', ';', or ':'
|
||||
E241, # multiple spaces after ','
|
||||
E242, # tab after ','
|
||||
E251, # unexpected spaces around keyword / parameter equals
|
||||
E261, # at least two spaces before inline comment
|
||||
E262, # inline comment should start with '# '
|
||||
E265, # block comment should start with '# '
|
||||
E266, # too many leading '#' for block comment
|
||||
E271, # multiple spaces after keyword
|
||||
E272, # multiple spaces before keyword
|
||||
E273, # tab after keyword
|
||||
E274, # tab before keyword
|
||||
E275, # missing whitespace after keyword
|
||||
E301, # expected 1 blank line, found 0
|
||||
E302, # expected 2 blank lines, found 0
|
||||
E303, # too many blank lines
|
||||
E304, # blank lines found after function decorator
|
||||
E305, # expected 2 blank lines after end of function or class
|
||||
E306, # expected 1 blank line before a nested definition
|
||||
E401, # multiple imports on one line
|
||||
E402, # module level import not at top of file
|
||||
E501, # line too long (82 > 79 characters)
|
||||
E502, # the backslash is redundant between brackets
|
||||
E701, # multiple statements on one line (colon)
|
||||
E702, # multiple statements on one line (semicolon)
|
||||
E703, # statement ends with a semicolon
|
||||
E704, # multiple statements on one line (def)
|
||||
E711, # comparison to None should be 'if cond is None:'
|
||||
E712, # comparison to True should be 'if cond is True:' or 'if cond:'
|
||||
E713, # test for membership should be 'not in'
|
||||
E714, # test for object identity should be 'is not'
|
||||
E721, # do not compare types, use 'isinstance()'
|
||||
E722, # do not use bare except, specify exception instead
|
||||
E731, # do not assign a lambda expression, use a def
|
||||
E741, # do not use variables named 'l', 'O', or 'I'
|
||||
E742, # do not define classes named 'l', 'O', or 'I'
|
||||
E743, # do not define functions named 'l', 'O', or 'I'
|
||||
E901, # SyntaxError or IndentationError
|
||||
E902, # IOError
|
||||
W191, # indentation contains tabs
|
||||
W291, # trailing whitespace
|
||||
W292, # no newline at end of file
|
||||
W293, # blank line contains whitespace
|
||||
W391, # blank line at end of file
|
||||
W503, # line break before binary operator
|
||||
W504, # line break after binary operator
|
||||
W505, # doc line too long (82 > 79 characters)
|
||||
W601, # .has_key() is deprecated, use 'in'
|
||||
W602, # deprecated form of raising exception
|
||||
W603, # '<>' is deprecated, use '!='
|
||||
W604, # backticks are deprecated, use 'repr()'
|
||||
W605, # invalid escape sequence 'x'
|
||||
W606, # 'async' and 'await' are reserved keywords starting with Python 3.7
|
||||
|
||||
# Full list of flake8 violations
|
||||
E999, # failed to compile a file into an Abstract Syntax Tree for the plugins that require it
|
||||
|
||||
# Full list of mccabe violations
|
||||
C901 # complexity value provided by the user
|
||||
|
||||
ignore =
|
||||
E221, # multiple spaces before operator
|
||||
E231, # missing whitespace after ',', ';', or ':'
|
||||
E241, # multiple spaces after ','
|
||||
W503, # line break before binary operator
|
||||
W504 # line break after binary operator
|
||||
|
||||
max-line-length = 160
|
||||
|
||||
show_source = True
|
||||
|
||||
statistics = True
|
||||
|
||||
exclude =
|
||||
.git,
|
||||
__pycache__,
|
||||
# submodules
|
||||
components/bootloader/subproject/components/micro-ecc/micro-ecc,
|
||||
components/bt/host/nimble/nimble,
|
||||
components/cmock/CMock,
|
||||
components/json/cJSON,
|
||||
components/mbedtls/mbedtls,
|
||||
components/openthread/openthread,
|
||||
components/unity/unity,
|
||||
components/spiffs/spiffs,
|
||||
# autogenerated scripts
|
||||
components/protocomm/python/constants_pb2.py,
|
||||
components/protocomm/python/sec0_pb2.py,
|
||||
components/protocomm/python/sec1_pb2.py,
|
||||
components/protocomm/python/sec2_pb2.py,
|
||||
components/protocomm/python/session_pb2.py,
|
||||
components/wifi_provisioning/python/wifi_ctrl_pb2.py,
|
||||
components/wifi_provisioning/python/wifi_scan_pb2.py,
|
||||
components/wifi_provisioning/python/wifi_config_pb2.py,
|
||||
components/wifi_provisioning/python/wifi_constants_pb2.py,
|
||||
components/esp_local_ctrl/python/esp_local_ctrl_pb2.py,
|
||||
|
||||
per-file-ignores =
|
||||
# Sphinx conf.py files use star imports to setup config variables
|
||||
docs/conf_common.py: F405
|
||||
20
.github/ISSUE_TEMPLATE/01_build_install_bug.yml
vendored
20
.github/ISSUE_TEMPLATE/01_build_install_bug.yml
vendored
@@ -95,26 +95,6 @@ body:
|
||||
render: plain
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: diag
|
||||
attributes:
|
||||
label: Diagnostic report archive.
|
||||
description: |
|
||||
Diagnostic report for ESP-IDF created using [idf.py diag](https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-guides/tools/idf-diag.html) or [esp-idf-diag](https://github.com/espressif/esp-idf-diag). The `idf.py diag` command is available beginning with ESP-IDF version 5.5. For older versions, you may want to consider using the `esp-idf-diag` command.
|
||||
|
||||
In your project directory, execute the following command:
|
||||
|
||||
Using `idf.py diag`
|
||||
1. idf.py diag
|
||||
|
||||
Using `esp-idf-diag`
|
||||
1. pip install esp-idf-diag
|
||||
2. esp-idf-diag create
|
||||
|
||||
Once the report is generated, the tool will guide you with the next steps.
|
||||
placeholder: Please attach the diagnostic report zip file here.
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: more-info
|
||||
attributes:
|
||||
|
||||
22
.github/ISSUE_TEMPLATE/02_runtime_bug.yml
vendored
22
.github/ISSUE_TEMPLATE/02_runtime_bug.yml
vendored
@@ -26,7 +26,7 @@ body:
|
||||
id: chip_revision
|
||||
attributes:
|
||||
label: Espressif SoC revision.
|
||||
description: On which Espressif SoC revision does your application run on? Run `esptool chip-id` (or `esptool.py chip_id` for ESP-IDF v5.5 and older) to find it.
|
||||
description: On which Espressif SoC revision does your application run on? Run `esptool chip_id` to find it.
|
||||
placeholder: ex. ESP32-C3 (QFN32) (revision v0.3)
|
||||
validations:
|
||||
required: true
|
||||
@@ -123,26 +123,6 @@ body:
|
||||
render: plain
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: diag
|
||||
attributes:
|
||||
label: Diagnostic report archive.
|
||||
description: |
|
||||
Diagnostic report for ESP-IDF created using [idf.py diag](https://docs.espressif.com/projects/esp-idf/en/latest/esp32/api-guides/tools/idf-diag.html) or [esp-idf-diag](https://github.com/espressif/esp-idf-diag). The `idf.py diag` command is available beginning with ESP-IDF version 5.5. For older versions, you may want to consider using the `esp-idf-diag` command.
|
||||
|
||||
In your project directory, execute the following command:
|
||||
|
||||
Using `idf.py diag`
|
||||
1. idf.py diag
|
||||
|
||||
Using `esp-idf-diag`
|
||||
1. pip install esp-idf-diag
|
||||
2. esp-idf-diag create
|
||||
|
||||
Once the report is generated, the tool will guide you with the next steps.
|
||||
placeholder: Please attach the diagnostic report zip file here.
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: more-info
|
||||
attributes:
|
||||
|
||||
5
.github/dangerjs/.gitignore
vendored
Normal file
5
.github/dangerjs/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# Transpiled JavaScript (if any)
|
||||
dist
|
||||
|
||||
# Installed dependencies
|
||||
node_modules
|
||||
47
.github/dangerjs/README.md
vendored
Normal file
47
.github/dangerjs/README.md
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
# DangerJS pull request automatic review tool - GitHub
|
||||
|
||||
## Implementation
|
||||
The main development is done in Espressif Gitlab project.
|
||||
Espressif [GitHub project espressif/esp-idf](https://github.com/espressif/esp-idf) is only a public mirror.
|
||||
|
||||
Therefore, all changes and updates to DangerJS files (`.github/dangerjs`) must be made via MR in the **Gitlab** repository by Espressif engineer.
|
||||
|
||||
When adding a new Danger rule or updating existing one, might be a good idea to test it on the developer's fork of GitHub project. This way, the new feature can be tested using a GitHub action without concern of damaging Espressif's GitHub repository.
|
||||
|
||||
Danger for Espressif GitHub is implemented in TypeScript. This makes the code more readable and robust than plain JavaScript.
|
||||
Compilation to JavaScript code (using `tsc`) is not necessary; Danger handles TypeScript natively.
|
||||
|
||||
A good practice is to store each Danger rule in a separate module, and then import these modules into the main Danger file `.github/dangerjs/dangerfile.ts` (see how this is done for currently present modules when adding a new one).
|
||||
|
||||
If the Danger module (new check/rule) uses an external NPM module (e.g. `axios`), be sure to add this dependency to `.github/dangerjs/package.json` and also update `.github/dangerjs/package-lock.json`.
|
||||
|
||||
In the GitHub action, `danger` is not installed globally (nor are its dependencies) and the `npx` call is used to start the `danger` checks in CI.
|
||||
|
||||
|
||||
## Adding new Danger rule
|
||||
For local development you can use following strategy
|
||||
|
||||
#### Install dependencies
|
||||
```sh
|
||||
cd .github/dangerjs
|
||||
npm install
|
||||
```
|
||||
(If the IDE still shows compiler/typing errors, reload the IDE window.)
|
||||
|
||||
#### Add new code as needed or make updates
|
||||
|
||||
#### Test locally
|
||||
Danger rules can be tested locally (without running the GitHub action pipeline).
|
||||
To do this, you have to first export the ENV variables used by Danger in the local terminal:
|
||||
|
||||
```sh
|
||||
export GITHUB_TOKEN='**************************************'
|
||||
```
|
||||
|
||||
Then you can call Danger by:
|
||||
```sh
|
||||
cd .github/dangerjs
|
||||
|
||||
danger pr https://github.com/espressif/esp-idf/pull/<number_of_pull_request>
|
||||
```
|
||||
The result will be displayed in your terminal.
|
||||
48
.github/dangerjs/dangerfile.ts
vendored
Normal file
48
.github/dangerjs/dangerfile.ts
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import { DangerResults } from "danger";
|
||||
declare const results: DangerResults;
|
||||
declare const message: (message: string, results?: DangerResults) => void;
|
||||
declare const markdown: (message: string, results?: DangerResults) => void;
|
||||
|
||||
// Import modules with danger rules
|
||||
// (Modules with checks are stored in ".github/dangerjs/<module_name>.ts". To import them, use path relative to "dangerfile.ts")
|
||||
import prCommitsTooManyCommits from "./prCommitsTooManyCommits";
|
||||
import prDescription from "./prDescription";
|
||||
import prTargetBranch from "./prTargetBranch";
|
||||
import prInfoContributor from "./prInfoContributor";
|
||||
import prCommitMessage from "./prCommitMessage";
|
||||
|
||||
async function runDangerRules(): Promise<void> {
|
||||
// Message to contributor about review and merge process
|
||||
const prInfoContributorMessage: string = await prInfoContributor();
|
||||
markdown(prInfoContributorMessage);
|
||||
|
||||
// Run danger checks
|
||||
prCommitsTooManyCommits();
|
||||
prDescription();
|
||||
prTargetBranch();
|
||||
prCommitMessage();
|
||||
|
||||
// Add success log if no issues
|
||||
const dangerFails: number = results.fails.length;
|
||||
const dangerWarns: number = results.warnings.length;
|
||||
const dangerInfos: number = results.messages.length;
|
||||
if (!dangerFails && !dangerWarns && !dangerInfos) {
|
||||
return message("Good Job! All checks are passing!");
|
||||
}
|
||||
|
||||
// Add retry link
|
||||
addRetryLink();
|
||||
}
|
||||
|
||||
runDangerRules();
|
||||
|
||||
function addRetryLink(): void {
|
||||
const serverUrl: string | undefined = process.env.GITHUB_SERVER_URL;
|
||||
const repoName: string | undefined = process.env.GITHUB_REPOSITORY;
|
||||
const runId: string | undefined = process.env.GITHUB_RUN_ID;
|
||||
|
||||
const retryLinkUrl: string = `${serverUrl}/${repoName}/actions/runs/${runId}`;
|
||||
const retryLink: string = `<sub>:repeat: You can re-run automatic PR checks by retrying the <a href="${retryLinkUrl}">DangerJS action</a></sub>`;
|
||||
|
||||
markdown(retryLink);
|
||||
}
|
||||
1999
.github/dangerjs/package-lock.json
generated
vendored
Normal file
1999
.github/dangerjs/package-lock.json
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
18
.github/dangerjs/package.json
vendored
Normal file
18
.github/dangerjs/package.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"name": "dangerjs-github",
|
||||
"description": "GitHub PR reviewing with DangerJS",
|
||||
"main": "dangerfile.ts",
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"axios": "^1.3.3",
|
||||
"danger": "^11.2.3",
|
||||
"request": "^2.88.2",
|
||||
"sync-request": "^6.1.0",
|
||||
"typescript": "^5.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^18.15.11"
|
||||
}
|
||||
}
|
||||
67
.github/dangerjs/prCommitMessage.ts
vendored
Normal file
67
.github/dangerjs/prCommitMessage.ts
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
import { DangerDSLType, DangerResults } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
declare const warn: (message: string, results?: DangerResults) => void;
|
||||
|
||||
interface Commit {
|
||||
message: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if commit messages are sufficiently descriptive (not too short).
|
||||
*
|
||||
* Search for commit messages that appear to be automatically generated or temporary messages and report them.
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
export default function (): void {
|
||||
const prCommits: Commit[] = danger.git.commits;
|
||||
|
||||
const detectRegexes: RegExp[] = [
|
||||
/^Merge pull request #\d+ from .*/i, // Automatically generated message by GitHub
|
||||
/^Merged .+:.+ into .+/i, // Automatically generated message by GitHub
|
||||
/^Automatic merge by GitHub Action/i, // Automatically generated message by GitHub
|
||||
/^Merge branch '.*' of .+ into .+/i, // Automatically generated message by GitHub
|
||||
/^Create\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
|
||||
/^Delete\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
|
||||
/^Update\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
|
||||
/^Initial commit/i, // Automatically generated message by GitHub
|
||||
/^WIP.*/i, // Message starts with prefix "WIP"
|
||||
/^Cleaned.*/i, // Message starts "Cleaned", , probably temporary
|
||||
/^Test:.*/i, // Message starts with "test" prefix, probably temporary
|
||||
/clean ?up/i, // Message contains "clean up", probably temporary
|
||||
/^[^A-Za-z0-9\s].*/, // Message starts with special characters
|
||||
];
|
||||
|
||||
let partMessages: string[] = [];
|
||||
|
||||
for (const commit of prCommits) {
|
||||
const commitMessage: string = commit.message;
|
||||
const commitMessageTitle: string = commit.message.split("\n")[0];
|
||||
|
||||
// Check if the commit message matches any regex from "detectRegexes"
|
||||
if (detectRegexes.some((regex) => commitMessage.match(regex))) {
|
||||
partMessages.push(
|
||||
`- the commit message \`${commitMessageTitle}\` appears to be a temporary or automatically generated message`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if the commit message is not too short
|
||||
const shortCommitMessageThreshold: number = 20; // commit message is considered too short below this number of characters
|
||||
if (commitMessage.length < shortCommitMessageThreshold) {
|
||||
partMessages.push(
|
||||
`- the commit message \`${commitMessageTitle}\` may not be sufficiently descriptive`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Create report
|
||||
if (partMessages.length) {
|
||||
partMessages.sort();
|
||||
let dangerMessage = `\nSome issues found for the commit messages in this MR:\n${partMessages.join(
|
||||
"\n"
|
||||
)}
|
||||
\nPlease consider updating these commit messages.`;
|
||||
warn(dangerMessage);
|
||||
}
|
||||
}
|
||||
19
.github/dangerjs/prCommitsTooManyCommits.ts
vendored
Normal file
19
.github/dangerjs/prCommitsTooManyCommits.ts
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import { DangerDSLType, DangerResults } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
declare const message: (message: string, results?: DangerResults) => void;
|
||||
|
||||
/**
|
||||
* Check if pull request has not an excessive numbers of commits (if squashed)
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
export default function (): void {
|
||||
const tooManyCommitThreshold: number = 2; // above this number of commits, squash commits is suggested
|
||||
const prCommits: number = danger.github.commits.length;
|
||||
|
||||
if (prCommits > tooManyCommitThreshold) {
|
||||
return message(
|
||||
`You might consider squashing your ${prCommits} commits (simplifying branch history).`
|
||||
);
|
||||
}
|
||||
}
|
||||
19
.github/dangerjs/prDescription.ts
vendored
Normal file
19
.github/dangerjs/prDescription.ts
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import { DangerDSLType, DangerResults } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
declare const warn: (message: string, results?: DangerResults) => void;
|
||||
|
||||
/**
|
||||
* Check if pull request has has a sufficiently accurate description
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
export default function (): void {
|
||||
const prDescription: string = danger.github.pr.body;
|
||||
const shortPrDescriptionThreshold: number = 100; // Description is considered too short below this number of characters
|
||||
|
||||
if (prDescription.length < shortPrDescriptionThreshold) {
|
||||
return warn(
|
||||
"The PR description looks very brief, please check if more details can be added."
|
||||
);
|
||||
}
|
||||
}
|
||||
58
.github/dangerjs/prInfoContributor.ts
vendored
Normal file
58
.github/dangerjs/prInfoContributor.ts
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
import { DangerDSLType } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
|
||||
interface Contributor {
|
||||
login?: string;
|
||||
}
|
||||
|
||||
const authorLogin = danger.github.pr.user.login;
|
||||
const messageKnownContributor: string = `
|
||||
***
|
||||
👋 **Hi ${authorLogin}**, thank you for your another contribution to \`espressif/esp-idf\` project!
|
||||
|
||||
If the change is approved and passes the tests in our internal git repository, it will appear in this public Github repository on the next sync.
|
||||
***
|
||||
`;
|
||||
|
||||
const messageFirstContributor: string = `
|
||||
***
|
||||
👋 **Welcome ${authorLogin}**, thank you for your first contribution to \`espressif/esp-idf\` project!
|
||||
|
||||
📘 Please check [Contributions Guide](https://docs.espressif.com/projects/esp-idf/en/latest/esp32/contribute/index.html#contributions-guide) for the contribution checklist, information regarding code and documentation style, testing and other topics.
|
||||
|
||||
🖊️ Please also make sure you have **read and signed** the [Contributor License Agreement for espressif/esp-idf project](https://cla-assistant.io/espressif/esp-idf).
|
||||
|
||||
#### Pull request review and merge process you can expect
|
||||
Espressif develops the ESP-IDF project in an internal repository (Gitlab). We do welcome contributions in the form of bug reports, feature requests and pull requests via this public GitHub repository.
|
||||
|
||||
1. An internal issue has been created for the PR, we assign it to the relevant engineer
|
||||
2. They review the PR and either approve it or ask you for changes or clarifications
|
||||
3. Once the Github PR is approved, we synchronize it into our internal git repository
|
||||
4. In the internal git repository we do the final review, collect approvals from core owners and make sure all the automated tests are passing
|
||||
- At this point we may do some adjustments to the proposed change, or extend it by adding tests or documentation.
|
||||
5. If the change is approved and passes the tests it is merged into the \`master\` branch
|
||||
6. On next sync from the internal git repository merged change will appear in this public Github repository
|
||||
|
||||
***
|
||||
`;
|
||||
|
||||
/**
|
||||
* Check whether the author of the pull request is known or a first-time contributor, and add a message to the PR with information about the review and merge process.
|
||||
*/
|
||||
export default async function (): Promise<string> {
|
||||
const contributors = await danger.github.api.repos.listContributors({
|
||||
owner: danger.github.thisPR.owner,
|
||||
repo: danger.github.thisPR.repo,
|
||||
});
|
||||
|
||||
const contributorsData: Contributor[] = contributors.data;
|
||||
const knownContributors: (string | undefined)[] = contributorsData.map(
|
||||
(contributor: Contributor) => contributor.login
|
||||
);
|
||||
|
||||
if (knownContributors.includes(authorLogin)) {
|
||||
return messageKnownContributor;
|
||||
} else {
|
||||
return messageFirstContributor;
|
||||
}
|
||||
}
|
||||
19
.github/dangerjs/prTargetBranch.ts
vendored
Normal file
19
.github/dangerjs/prTargetBranch.ts
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import { DangerDSLType, DangerResults } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
declare const fail: (message: string, results?: DangerResults) => void;
|
||||
|
||||
/**
|
||||
* Check if the target branch is "master"
|
||||
*
|
||||
* @dangerjs FAIL
|
||||
*/
|
||||
export default function (): void {
|
||||
const prTargetBranch: string = danger.github?.pr?.base?.ref;
|
||||
|
||||
if (prTargetBranch !== "master") {
|
||||
return fail(`
|
||||
The target branch for this pull request should be \`master\`.\n
|
||||
If you would like to add this feature to the release branch, please state this in the PR description and we will consider backporting it.
|
||||
`);
|
||||
}
|
||||
}
|
||||
17
.github/dangerjs/tsconfig.json
vendored
Normal file
17
.github/dangerjs/tsconfig.json
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"esModuleInterop": true,
|
||||
"target": "es6",
|
||||
"noImplicitAny": true,
|
||||
"noUnusedParameters": true,
|
||||
"strictNullChecks": true,
|
||||
"sourceMap": true,
|
||||
"removeComments": true,
|
||||
"outDir": "./dist"
|
||||
},
|
||||
"include": [
|
||||
"./*.ts"
|
||||
]
|
||||
}
|
||||
15
.github/dependabot.yml
vendored
Normal file
15
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "all"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
ignore:
|
||||
- directory: ".gitlab/dangerjs"
|
||||
patterns:
|
||||
- "package-lock.json"
|
||||
- directory: ".github/dangerjs"
|
||||
patterns:
|
||||
- "package-lock.json"
|
||||
# Disable "version updates" (keep only "security updates")
|
||||
open-pull-requests-limit: 0
|
||||
25
.github/workflows/dangerjs.yml
vendored
25
.github/workflows/dangerjs.yml
vendored
@@ -9,19 +9,28 @@ permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
pull-request-style-linter:
|
||||
danger-check:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: .github/dangerjs
|
||||
steps:
|
||||
- name: Check out PR head
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: DangerJS pull request linter
|
||||
uses: espressif/shared-github-dangerjs@v1
|
||||
- name: Setup NodeJS environment
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 18
|
||||
cache: npm
|
||||
cache-dependency-path: .github/dangerjs/package-lock.json
|
||||
|
||||
- name: Install DangerJS dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Run DangerJS
|
||||
run: npx danger ci --failOnErrors -v
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
instructions-gitlab-mirror: 'true'
|
||||
instructions-contributions-file: 'CONTRIBUTING.md'
|
||||
instructions-cla-link: 'https://cla-assistant.io/espressif/esp-idf'
|
||||
|
||||
2
.github/workflows/docker.yml
vendored
2
.github/workflows/docker.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
# Disable the job in forks
|
||||
if: ${{ github.repository_owner == 'espressif' }}
|
||||
|
||||
runs-on: ubuntu-24.04-X64-large
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Depending on the branch/tag, set CLONE_BRANCH_OR_TAG variable (used in the Dockerfile
|
||||
# as a build arg) and TAG_NAME (used when tagging the image).
|
||||
|
||||
5
.github/workflows/issue_comment.yml
vendored
5
.github/workflows/issue_comment.yml
vendored
@@ -12,13 +12,12 @@ jobs:
|
||||
name: Sync Issue Comments to Jira
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Sync issue comments to JIRA
|
||||
uses: espressif/sync-jira-actions@v1
|
||||
uses: espressif/github-actions/sync_issues_to_jira@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
JIRA_PASS: ${{ secrets.JIRA_PASS }}
|
||||
JIRA_PROJECT: IDFGH
|
||||
JIRA_URL: ${{ secrets.JIRA_URL }}
|
||||
JIRA_USER: ${{ secrets.JIRA_USER }}
|
||||
WEBHOOK_URL: ${{ secrets.JIRA_ISSUE_COMMENT_WEBHOOK_URL }}
|
||||
|
||||
5
.github/workflows/new_issues.yml
vendored
5
.github/workflows/new_issues.yml
vendored
@@ -12,13 +12,12 @@ jobs:
|
||||
name: Sync issues to Jira
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Sync GitHub issues to Jira project
|
||||
uses: espressif/sync-jira-actions@v1
|
||||
uses: espressif/github-actions/sync_issues_to_jira@master
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
JIRA_PASS: ${{ secrets.JIRA_PASS }}
|
||||
JIRA_PROJECT: IDFGH
|
||||
JIRA_URL: ${{ secrets.JIRA_URL }}
|
||||
JIRA_USER: ${{ secrets.JIRA_USER }}
|
||||
WEBHOOK_URL: ${{ secrets.JIRA_ISSUE_COMMENT_WEBHOOK_URL }}
|
||||
|
||||
2
.github/workflows/new_prs.yml
vendored
2
.github/workflows/new_prs.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
||||
name: Sync PRs to Jira
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Sync PRs to Jira project
|
||||
uses: espressif/github-actions/sync_issues_to_jira@master
|
||||
with:
|
||||
|
||||
2
.github/workflows/pr_approved.yml
vendored
2
.github/workflows/pr_approved.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
||||
(github.event.label.name == 'PR-Sync-Rebase') ||
|
||||
(github.event.label.name == 'PR-Sync-Update')
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Sync approved PRs to internal codebase
|
||||
uses: espressif/github-actions/github_pr_to_internal_pr@master
|
||||
env:
|
||||
|
||||
6
.github/workflows/pre_commit_check.yml
vendored
6
.github/workflows/pre_commit_check.yml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
SKIP: "cleanup-ignore-lists" # Comma-separated string of ignored pre-commit check IDs
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v2
|
||||
- name: Fetch head and base refs
|
||||
# This is necessary for pre-commit to check the changes in the PR branch
|
||||
run: |
|
||||
@@ -23,7 +23,7 @@ jobs:
|
||||
- name: Set up Python environment
|
||||
uses: actions/setup-python@master
|
||||
with:
|
||||
python-version: "3.10"
|
||||
python-version: v3.8
|
||||
- name: Install python packages
|
||||
run: |
|
||||
pip install pre-commit
|
||||
@@ -40,5 +40,3 @@ jobs:
|
||||
echo ""
|
||||
exit 1
|
||||
fi
|
||||
# Run pre-commit for PowerShell scripts check
|
||||
pre-commit run --hook-stage manual check-powershell-scripts --from-ref base_ref --to-ref pr_ref --show-diff-on-failure
|
||||
|
||||
11
.github/workflows/release_zips.yml
vendored
11
.github/workflows/release_zips.yml
vendored
@@ -8,11 +8,10 @@ on:
|
||||
jobs:
|
||||
release_zips:
|
||||
name: Create release zip file
|
||||
runs-on: ubuntu-24.04
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- name: Create a recursive clone source zip
|
||||
uses: espressif/release-zips-action@v1
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
release_project_name: ESP-IDF
|
||||
git_extra_args: --shallow-since="1 year ago"
|
||||
uses: espressif/github-actions/release_zips@master
|
||||
env:
|
||||
RELEASE_PROJECT_NAME: ESP-IDF
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
35
.github/workflows/vulnerability_scan.yml
vendored
35
.github/workflows/vulnerability_scan.yml
vendored
@@ -1,35 +0,0 @@
|
||||
name: Vulnerability scan
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
vulnerability-scan:
|
||||
strategy:
|
||||
# We don't want to run all jobs in parallel, because this would
|
||||
# overload NVD and we would get 503
|
||||
max-parallel: 1
|
||||
matrix:
|
||||
# References/branches which should be scanned for vulnerabilities are
|
||||
# defined in the VULNERABILITY_SCAN_REFS variable as json list.
|
||||
# For example: ['master', 'release/v5.2', 'release/v5.1', 'release/v5.0', 'release/v4.4']
|
||||
ref: ${{ fromJSON(vars.VULNERABILITY_SCAN_REFS) }}
|
||||
name: Vulnerability scan
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
ref: ${{ matrix.ref }}
|
||||
|
||||
- name: Vulnerability scan
|
||||
env:
|
||||
SBOM_CHECK_LOCAL_DB: ${{ vars.SBOM_CHECK_LOCAL_DB }}
|
||||
SBOM_MATTERMOST_WEBHOOK: ${{ secrets.SBOM_MATTERMOST_WEBHOOK }}
|
||||
NVDAPIKEY: ${{ secrets.NVDAPIKEY }}
|
||||
uses: espressif/esp-idf-sbom-action@master
|
||||
with:
|
||||
ref: ${{ matrix.ref }}
|
||||
47
.gitignore
vendored
47
.gitignore
vendored
@@ -24,6 +24,18 @@ GPATH
|
||||
# cache dir
|
||||
.cache/
|
||||
|
||||
# Components Unit Test Apps files
|
||||
components/**/build/
|
||||
components/**/build_*_*/
|
||||
components/**/sdkconfig
|
||||
components/**/sdkconfig.old
|
||||
|
||||
# Example project files
|
||||
examples/**/build/
|
||||
examples/**/build_esp*_*/
|
||||
examples/**/sdkconfig
|
||||
examples/**/sdkconfig.old
|
||||
|
||||
# Doc build artifacts
|
||||
docs/_build/
|
||||
docs/doxygen_sqlite3.db
|
||||
@@ -32,24 +44,16 @@ docs/doxygen_sqlite3.db
|
||||
docs/_static/DejaVuSans.ttf
|
||||
docs/_static/NotoSansSC-Regular.otf
|
||||
|
||||
# Components Unit Test Apps files
|
||||
components/**/build/
|
||||
components/**/build_*_*/
|
||||
components/**/sdkconfig
|
||||
components/**/sdkconfig.old
|
||||
components/**/test_apps/wifi_nvs_config/nvs_data_suffix.csv
|
||||
|
||||
# Example project files
|
||||
examples/**/build/
|
||||
examples/**/build_*_*/
|
||||
examples/**/sdkconfig
|
||||
examples/**/sdkconfig.old
|
||||
|
||||
# Unit test app files
|
||||
tools/unit-test-app/build
|
||||
tools/unit-test-app/build_*_*/
|
||||
tools/unit-test-app/sdkconfig
|
||||
tools/unit-test-app/sdkconfig.old
|
||||
tools/unit-test-app/build
|
||||
tools/unit-test-app/build_*_*/
|
||||
tools/unit-test-app/output
|
||||
tools/unit-test-app/test_configs
|
||||
|
||||
# Unit Test CMake compile log folder
|
||||
log_ut_cmake
|
||||
|
||||
# test application build files
|
||||
tools/test_apps/**/build/
|
||||
@@ -57,8 +61,7 @@ tools/test_apps/**/build_*_*/
|
||||
tools/test_apps/**/sdkconfig
|
||||
tools/test_apps/**/sdkconfig.old
|
||||
|
||||
TEST_LOGS/
|
||||
build_summary_*.xml
|
||||
TEST_LOGS
|
||||
|
||||
# gcov coverage reports
|
||||
*.gcda
|
||||
@@ -66,6 +69,8 @@ build_summary_*.xml
|
||||
coverage.info
|
||||
coverage_report/
|
||||
|
||||
test_multi_heap_host
|
||||
|
||||
# VS Code Settings
|
||||
.vscode/
|
||||
|
||||
@@ -95,13 +100,9 @@ dependencies.lock
|
||||
managed_components
|
||||
|
||||
# pytest log
|
||||
pytest-embedded/
|
||||
# legacy one
|
||||
pytest_embedded_log/
|
||||
app_info_*.txt
|
||||
size_info*.txt
|
||||
XUNIT_RESULT*.xml
|
||||
.manifest_sha
|
||||
list_job_*.txt
|
||||
size_info.txt
|
||||
|
||||
# clang config (for LSP)
|
||||
.clangd
|
||||
|
||||
@@ -3,30 +3,29 @@ workflow:
|
||||
# Disable those non-protected push triggered pipelines
|
||||
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_COMMIT_TAG !~ /^qa-test/ && $CI_PIPELINE_SOURCE == "push"'
|
||||
when: never
|
||||
# merged result pipelines
|
||||
- if: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
|
||||
# when running merged result pipelines, CI_COMMIT_SHA represents the temp commit it created.
|
||||
# Please use PIPELINE_COMMIT_SHA at all places that require a commit sha of the original commit.
|
||||
- if: $CI_OPEN_MERGE_REQUESTS != null
|
||||
variables:
|
||||
PIPELINE_COMMIT_SHA: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
|
||||
# else
|
||||
- if: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA == null || $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA == ""
|
||||
IS_MR_PIPELINE: 1
|
||||
- if: $CI_OPEN_MERGE_REQUESTS == null
|
||||
variables:
|
||||
PIPELINE_COMMIT_SHA: $CI_COMMIT_SHA
|
||||
IS_MR_PIPELINE: 0
|
||||
- when: always
|
||||
|
||||
# Place the default settings in `.gitlab/ci/common.yml` instead
|
||||
|
||||
include:
|
||||
- ".gitlab/ci/danger.yml"
|
||||
- ".gitlab/ci/common.yml"
|
||||
- ".gitlab/ci/rules.yml"
|
||||
- ".gitlab/ci/upload_cache.yml"
|
||||
- ".gitlab/ci/docs.yml"
|
||||
- ".gitlab/ci/static-code-analysis.yml"
|
||||
- ".gitlab/ci/pre_commit.yml"
|
||||
- ".gitlab/ci/pre_check.yml"
|
||||
- ".gitlab/ci/build.yml"
|
||||
- ".gitlab/ci/integration_test.yml"
|
||||
- ".gitlab/ci/host-test.yml"
|
||||
- ".gitlab/ci/deploy.yml"
|
||||
- ".gitlab/ci/post_deploy.yml"
|
||||
- ".gitlab/ci/test-win.yml"
|
||||
- '.gitlab/ci/common.yml'
|
||||
- '.gitlab/ci/rules.yml'
|
||||
- '.gitlab/ci/upload_cache.yml'
|
||||
- '.gitlab/ci/docs.yml'
|
||||
- '.gitlab/ci/static-code-analysis.yml'
|
||||
- '.gitlab/ci/pre_check.yml'
|
||||
- '.gitlab/ci/build.yml'
|
||||
- '.gitlab/ci/integration_test.yml'
|
||||
- '.gitlab/ci/host-test.yml'
|
||||
- '.gitlab/ci/target-test.yml'
|
||||
- '.gitlab/ci/deploy.yml'
|
||||
|
||||
@@ -2,252 +2,5 @@
|
||||
#
|
||||
# https://docs.gitlab.com/ee/user/project/code_owners.html#the-syntax-of-code-owners-files
|
||||
#
|
||||
# If more than one rule matches a given file, the latest rule is used.
|
||||
# The file should be generally kept sorted, except when it is necessary
|
||||
# to use a different order due to the fact above. In that case, use
|
||||
# '# sort-order-reset' comment line to reset the sort order.
|
||||
#
|
||||
# Recipes for a few common cases:
|
||||
#
|
||||
# 1. Specific directory with all its contents:
|
||||
#
|
||||
# /components/app_trace/
|
||||
#
|
||||
# Note the trailing slash!
|
||||
#
|
||||
# 2. File with certain extension in any subdirectory of a certain directory:
|
||||
#
|
||||
# /examples/**/*.py
|
||||
#
|
||||
# This includes an *.py files in /examples/ directory as well.
|
||||
#
|
||||
# 3. Contents of a directory with a certain name, anywhere in the tree:
|
||||
#
|
||||
# test_*_host/
|
||||
#
|
||||
# Will match everything under components/efuse/test_efuse_host/,
|
||||
# components/heap/test_multi_heap_host/, components/lwip/test_afl_host/, etc.
|
||||
#
|
||||
# 4. Same as above, except limited to a specific place in the tree:
|
||||
#
|
||||
# /components/esp32*/
|
||||
#
|
||||
# Matches everything under /components/esp32, /components/esp32s2, etc.
|
||||
# Doesn't match /tools/some-test/components/esp32s5.
|
||||
#
|
||||
# 5. Specific file:
|
||||
#
|
||||
# /tools/tools.json
|
||||
#
|
||||
# 6. File with a certain name anywhere in the tree
|
||||
#
|
||||
# .gitignore
|
||||
#
|
||||
|
||||
* @esp-idf-codeowners/other
|
||||
|
||||
/.* @esp-idf-codeowners/tools
|
||||
/.codespellrc @esp-idf-codeowners/ci
|
||||
/.github/workflows/ @esp-idf-codeowners/ci
|
||||
/.gitlab-ci.yml @esp-idf-codeowners/ci
|
||||
/.gitlab/ci/ @esp-idf-codeowners/ci
|
||||
/.idf_build_apps.toml @esp-idf-codeowners/ci
|
||||
/.idf_ci.toml @esp-idf-codeowners/ci
|
||||
/.pre-commit-config.yaml @esp-idf-codeowners/ci
|
||||
/.vale.ini @esp-idf-codeowners/docs
|
||||
/CMakeLists.txt @esp-idf-codeowners/build-config
|
||||
/COMPATIBILITY*.md @esp-idf-codeowners/peripherals
|
||||
/CONTRIBUTING.md @esp-idf-codeowners/docs
|
||||
/Kconfig @esp-idf-codeowners/build-config
|
||||
/README*.md @esp-idf-codeowners/docs
|
||||
/ROADMAP*.md @esp-idf-codeowners/docs
|
||||
/SUPPORT_POLICY*.md @esp-idf-codeowners/docs
|
||||
/add_path.sh @esp-idf-codeowners/tools
|
||||
/conftest.py @esp-idf-codeowners/ci
|
||||
/export.* @esp-idf-codeowners/tools
|
||||
/install.* @esp-idf-codeowners/tools
|
||||
/pytest.ini @esp-idf-codeowners/ci
|
||||
/ruff.toml @esp-idf-codeowners/tools
|
||||
/sdkconfig.rename @esp-idf-codeowners/build-config
|
||||
/sonar-project.properties @esp-idf-codeowners/ci
|
||||
|
||||
# sort-order-reset
|
||||
|
||||
/components/app_trace/ @esp-idf-codeowners/debugging
|
||||
/components/app_update/ @esp-idf-codeowners/system @esp-idf-codeowners/app-utilities
|
||||
/components/bootloader*/ @esp-idf-codeowners/system @esp-idf-codeowners/security
|
||||
/components/bootloader_support/bootloader_flash/ @esp-idf-codeowners/peripherals
|
||||
/components/bt/ @esp-idf-codeowners/bluetooth
|
||||
/components/cmock/ @esp-idf-codeowners/system
|
||||
/components/console/ @esp-idf-codeowners/system @esp-idf-codeowners/app-utilities/console
|
||||
/components/cxx/ @esp-idf-codeowners/system
|
||||
/components/driver/ @esp-idf-codeowners/peripherals
|
||||
/components/efuse/ @esp-idf-codeowners/system
|
||||
/components/esp_adc/ @esp-idf-codeowners/peripherals
|
||||
/components/esp_app_format/ @esp-idf-codeowners/system @esp-idf-codeowners/app-utilities
|
||||
/components/esp_blockdev/ @esp-idf-codeowners/storage
|
||||
/components/esp_bootloader_format/ @esp-idf-codeowners/system @esp-idf-codeowners/app-utilities
|
||||
/components/esp_coex/ @esp-idf-codeowners/wifi @esp-idf-codeowners/bluetooth @esp-idf-codeowners/ieee802154
|
||||
/components/esp_common/ @esp-idf-codeowners/system
|
||||
/components/esp_driver_*/ @esp-idf-codeowners/peripherals
|
||||
/components/esp_driver_sdmmc/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/storage
|
||||
/components/esp_eth/ @esp-idf-codeowners/network
|
||||
/components/esp_event/ @esp-idf-codeowners/system
|
||||
/components/esp_gdbstub/ @esp-idf-codeowners/debugging
|
||||
/components/esp_hal_*/ @esp-idf-codeowners/peripherals
|
||||
/components/esp_hid/ @esp-idf-codeowners/bluetooth
|
||||
/components/esp_http_client/ @esp-idf-codeowners/app-utilities
|
||||
/components/esp_http_server/ @esp-idf-codeowners/app-utilities
|
||||
/components/esp_https_ota/ @esp-idf-codeowners/app-utilities
|
||||
/components/esp_https_server/ @esp-idf-codeowners/app-utilities
|
||||
/components/esp_hw_support/ @esp-idf-codeowners/system @esp-idf-codeowners/peripherals
|
||||
/components/esp_hw_support/lowpower/ @esp-idf-codeowners/power-management
|
||||
/components/esp_hw_support/usb_phy/ @esp-idf-codeowners/peripherals/usb
|
||||
/components/esp_lcd/ @esp-idf-codeowners/peripherals
|
||||
/components/esp_libc/ @esp-idf-codeowners/system @esp-idf-codeowners/toolchain
|
||||
/components/esp_local_ctrl/ @esp-idf-codeowners/app-utilities
|
||||
/components/esp_mm/ @esp-idf-codeowners/peripherals
|
||||
/components/esp_netif/ @esp-idf-codeowners/network
|
||||
/components/esp_netif_stack/ @esp-idf-codeowners/network
|
||||
/components/esp_partition/ @esp-idf-codeowners/storage
|
||||
/components/esp_phy/ @esp-idf-codeowners/bluetooth @esp-idf-codeowners/wifi @esp-idf-codeowners/ieee802154
|
||||
/components/esp_pm/ @esp-idf-codeowners/power-management @esp-idf-codeowners/bluetooth @esp-idf-codeowners/wifi @esp-idf-codeowners/system
|
||||
/components/esp_psram/ @esp-idf-codeowners/peripherals
|
||||
/components/esp_psram/system_layer/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/system
|
||||
/components/esp_ringbuf/ @esp-idf-codeowners/system
|
||||
/components/esp_rom/ @esp-idf-codeowners/system @esp-idf-codeowners/bluetooth @esp-idf-codeowners/wifi
|
||||
/components/esp_security/ @esp-idf-codeowners/security
|
||||
/components/esp_stdio/ @esp-idf-codeowners/storage @esp-idf-codeowners/system
|
||||
/components/esp_system/ @esp-idf-codeowners/system
|
||||
/components/esp_tee/ @esp-idf-codeowners/security
|
||||
/components/esp_timer/ @esp-idf-codeowners/system
|
||||
/components/esp-tls/ @esp-idf-codeowners/app-utilities
|
||||
/components/esp_trace/ @esp-idf-codeowners/debugging
|
||||
/components/esp_usb_cdc_rom_console/ @esp-idf-codeowners/system @esp-idf-codeowners/peripherals/usb
|
||||
/components/esp_wifi/ @esp-idf-codeowners/wifi
|
||||
/components/espcoredump/ @esp-idf-codeowners/debugging
|
||||
/components/esptool_py/ @esp-idf-codeowners/tools
|
||||
/components/fatfs/ @esp-idf-codeowners/storage
|
||||
/components/freertos/ @esp-idf-codeowners/system
|
||||
/components/hal/ @esp-idf-codeowners/peripherals
|
||||
/components/hal/test_apps/crypto/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/security
|
||||
/components/hal/test_apps/tee/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/security
|
||||
/components/heap/ @esp-idf-codeowners/system
|
||||
/components/http_parser/ @esp-idf-codeowners/app-utilities
|
||||
/components/idf_test/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/system
|
||||
/components/ieee802154/ @esp-idf-codeowners/ieee802154
|
||||
/components/linux/ @esp-idf-codeowners/system
|
||||
/components/log/ @esp-idf-codeowners/system
|
||||
/components/lwip/ @esp-idf-codeowners/lwip
|
||||
/components/mbedtls/ @esp-idf-codeowners/app-utilities/mbedtls @esp-idf-codeowners/security
|
||||
/components/mqtt/ @esp-idf-codeowners/network
|
||||
/components/nvs_flash/ @esp-idf-codeowners/storage
|
||||
/components/nvs_sec_provider/ @esp-idf-codeowners/storage @esp-idf-codeowners/security
|
||||
/components/openthread/ @esp-idf-codeowners/ieee802154
|
||||
/components/partition_table/ @esp-idf-codeowners/system
|
||||
/components/perfmon/ @esp-idf-codeowners/debugging
|
||||
/components/protobuf-c/ @esp-idf-codeowners/app-utilities
|
||||
/components/protocomm/ @esp-idf-codeowners/app-utilities/provisioning
|
||||
/components/pthread/ @esp-idf-codeowners/system
|
||||
/components/riscv/ @esp-idf-codeowners/system
|
||||
/components/rt/ @esp-idf-codeowners/system
|
||||
/components/sdmmc/ @esp-idf-codeowners/storage
|
||||
/components/soc/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/system
|
||||
/components/spi_flash/ @esp-idf-codeowners/peripherals
|
||||
/components/spiffs/ @esp-idf-codeowners/storage
|
||||
/components/tcp_transport/ @esp-idf-codeowners/network
|
||||
/components/ulp/ @esp-idf-codeowners/system
|
||||
/components/unity/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/system
|
||||
/components/vfs/ @esp-idf-codeowners/storage
|
||||
/components/wear_levelling/ @esp-idf-codeowners/storage
|
||||
/components/wpa_supplicant/ @esp-idf-codeowners/wifi @esp-idf-codeowners/app-utilities/mbedtls
|
||||
/components/xtensa/ @esp-idf-codeowners/system
|
||||
|
||||
/docs/ @esp-idf-codeowners/docs
|
||||
/docs/docs_not_updated/ @esp-idf-codeowners/all-maintainers
|
||||
/docs/**/api-guides/tools/ @esp-idf-codeowners/tools
|
||||
/docs/**/api-guides/kconfig/ @esp-idf-codeowners/tools
|
||||
/docs/en/api-guides/core_dump.rst @esp-idf-codeowners/debugging
|
||||
/docs/**/api-guides/wifi* @esp-idf-codeowners/wifi
|
||||
/docs/**/api-guides/esp-wifi-mesh.rst @esp-idf-codeowners/wifi
|
||||
/docs/en/api-guides/jtag-debugging/ @esp-idf-codeowners/debugging
|
||||
/docs/**/api-reference/bluetooth/ @esp-idf-codeowners/bluetooth
|
||||
/docs/**/api-reference/network/ @esp-idf-codeowners/network @esp-idf-codeowners/wifi
|
||||
/docs/**/api-reference/peripherals/ @esp-idf-codeowners/peripherals
|
||||
/docs/**/api-reference/peripherals/usb* @esp-idf-codeowners/peripherals @esp-idf-codeowners/peripherals/usb
|
||||
/docs/**/api-reference/peripherals/usb*/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/peripherals/usb
|
||||
/docs/**/api-reference/protocols/ @esp-idf-codeowners/network @esp-idf-codeowners/app-utilities
|
||||
/docs/**/api-reference/provisioning/ @esp-idf-codeowners/app-utilities/provisioning
|
||||
/docs/**/api-reference/storage/ @esp-idf-codeowners/storage
|
||||
/docs/**/api-reference/system/ @esp-idf-codeowners/system
|
||||
/docs/**/security/ @esp-idf-codeowners/security
|
||||
/docs/**/migration-guides/ @esp-idf-codeowners/docs @esp-idf-codeowners/all-maintainers
|
||||
/docs/**/contribute/install-pre-commit-hook.rst @esp-idf-codeowners/ci @esp-idf-codeowners/tools
|
||||
|
||||
/examples/README.md @esp-idf-codeowners/docs @esp-idf-codeowners/ci
|
||||
/examples/**/*.py @esp-idf-codeowners/ci @esp-idf-codeowners/tools
|
||||
/examples/bluetooth/ @esp-idf-codeowners/bluetooth
|
||||
/examples/build_system/ @esp-idf-codeowners/build-config
|
||||
/examples/common_components/ @esp-idf-codeowners/system @esp-idf-codeowners/wifi @esp-idf-codeowners/lwip @esp-idf-codeowners/network
|
||||
/examples/custom_bootloader/ @esp-idf-codeowners/system
|
||||
/examples/cxx/ @esp-idf-codeowners/system
|
||||
/examples/ethernet/ @esp-idf-codeowners/network
|
||||
/examples/get-started/ @esp-idf-codeowners/system
|
||||
/examples/ieee802154/ @esp-idf-codeowners/ieee802154
|
||||
/examples/lowpower/ @esp-idf-codeowners/power-management @esp-idf-codeowners/system
|
||||
/examples/mesh/ @esp-idf-codeowners/wifi
|
||||
/examples/network/ @esp-idf-codeowners/network @esp-idf-codeowners/wifi
|
||||
/examples/openthread/ @esp-idf-codeowners/ieee802154
|
||||
/examples/peripherals/ @esp-idf-codeowners/peripherals
|
||||
/examples/peripherals/usb/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/peripherals/usb
|
||||
/examples/phy/ @esp-idf-codeowners/bluetooth @esp-idf-codeowners/wifi @esp-idf-codeowners/ieee802154
|
||||
/examples/protocols/ @esp-idf-codeowners/network @esp-idf-codeowners/app-utilities
|
||||
/examples/security/ @esp-idf-codeowners/security
|
||||
/examples/storage/ @esp-idf-codeowners/storage
|
||||
/examples/system/ @esp-idf-codeowners/system
|
||||
/examples/system/ota/ @esp-idf-codeowners/app-utilities
|
||||
/examples/wifi/ @esp-idf-codeowners/wifi
|
||||
/examples/zigbee/ @esp-idf-codeowners/ieee802154
|
||||
|
||||
/tools/ @esp-idf-codeowners/tools
|
||||
/tools/ble/ @esp-idf-codeowners/app-utilities
|
||||
/tools/bt/ @esp-idf-codeowners/bluetooth
|
||||
/tools/ci/ @esp-idf-codeowners/ci
|
||||
/tools/cmake/ @esp-idf-codeowners/build-config
|
||||
/tools/cmake/toolchain-*.cmake @esp-idf-codeowners/toolchain
|
||||
/tools/cmakev2/ @esp-idf-codeowners/build-config
|
||||
/tools/esp_app_trace/ @esp-idf-codeowners/debugging
|
||||
/tools/gdb_panic_server.py @esp-idf-codeowners/debugging
|
||||
/tools/kconfig*/ @esp-idf-codeowners/build-config
|
||||
/tools/ldgen/ @esp-idf-codeowners/build-config
|
||||
/tools/mass_mfg/ @esp-idf-codeowners/app-utilities
|
||||
/tools/mocks/ @esp-idf-codeowners/system
|
||||
|
||||
/tools/test_apps/ @esp-idf-codeowners/ci
|
||||
/tools/test_apps/README.md @esp-idf-codeowners/docs @esp-idf-codeowners/ci
|
||||
|
||||
## Note: owners here should be the same as the owners for the same example subdir, above
|
||||
/tools/test_apps/build_system/ @esp-idf-codeowners/build-config
|
||||
/tools/test_apps/components/test_utils/ @esp-idf-codeowners/peripherals @esp-idf-codeowners/system
|
||||
/tools/test_apps/configs/ @esp-idf-codeowners/system
|
||||
/tools/test_apps/linux_compatible/ @esp-idf-codeowners/system
|
||||
/tools/test_apps/phy/ @esp-idf-codeowners/bluetooth @esp-idf-codeowners/wifi @esp-idf-codeowners/ieee802154
|
||||
/tools/test_apps/protocols/ @esp-idf-codeowners/network @esp-idf-codeowners/app-utilities
|
||||
/tools/test_apps/security/ @esp-idf-codeowners/security
|
||||
/tools/test_apps/storage/ @esp-idf-codeowners/storage
|
||||
/tools/test_apps/system/ @esp-idf-codeowners/system
|
||||
|
||||
/tools/test_apps/**/*.py @esp-idf-codeowners/ci @esp-idf-codeowners/tools
|
||||
|
||||
/tools/test_build_system/ @esp-idf-codeowners/tools @esp-idf-codeowners/build-config
|
||||
|
||||
/tools/tools.json @esp-idf-codeowners/tools @esp-idf-codeowners/toolchain @esp-idf-codeowners/debugging
|
||||
|
||||
# sort-order-reset
|
||||
|
||||
/components/**/test_apps/**/*.py @esp-idf-codeowners/ci @esp-idf-codeowners/tools
|
||||
|
||||
# ignore lists
|
||||
/tools/ci/check_copyright_config.yaml @esp-idf-codeowners/all-maintainers
|
||||
/tools/ci/check_copyright_ignore.txt @esp-idf-codeowners/all-maintainers
|
||||
/tools/ci/mypy_ignore_list.txt @esp-idf-codeowners/tools
|
||||
* @esp-idf-codeowners/all-maintainers
|
||||
|
||||
@@ -145,11 +145,11 @@ check if there's a suitable `.if-<if-anchor-you-need>` anchor
|
||||
1. if there is, create a rule following [`rules` Template Naming Rules](#rules-template-naming-rules).For detail information, please refer to [GitLab Documentation `rules-if`](https://docs.gitlab.com/ee/ci/yaml/README.html#rulesif). Here's an example.
|
||||
|
||||
```yaml
|
||||
.rules:patterns:clang_tidy:
|
||||
.rules:patterns:python-files:
|
||||
rules:
|
||||
- <<: *if-protected
|
||||
- <<: *if-dev-push
|
||||
changes: *patterns-c-files
|
||||
changes: *patterns-python-files
|
||||
```
|
||||
|
||||
2. if there isn't
|
||||
@@ -250,9 +250,76 @@ We're using the latest version of [idf-build-apps][idf-build-apps]. Please refer
|
||||
In ESP-IDF CI, there's a few more special rules are additionally supported to disable the check app dependencies feature:
|
||||
|
||||
- Add MR labels `BUILD_AND_TEST_ALL_APPS`
|
||||
- Pipeline variable `IDF_CI_SELECT_ALL_PYTEST_CASES=1`
|
||||
- Run in protected branches
|
||||
|
||||
## Upload/Download Artifacts to Internal Minio Server
|
||||
|
||||
Please refer to the documentation [here](https://docs.espressif.com/projects/idf-ci/en/latest/guides/cli.html)
|
||||
### Users Without Access to Minio
|
||||
|
||||
If you don't have access to the internal Minio server, you can still download the artifacts from the shared link in the job log.
|
||||
|
||||
The log will look like this:
|
||||
|
||||
```shell
|
||||
Pipeline ID : 587355
|
||||
Job name : build_clang_test_apps_esp32
|
||||
Job ID : 40272275
|
||||
Created archive file: 40272275.zip, uploading as 587355/build_dir_without_map_and_elf_files/build_clang_test_apps_esp32/40272275.zip
|
||||
Please download the archive file includes build_dir_without_map_and_elf_files from [INTERNAL_URL]
|
||||
```
|
||||
|
||||
### Users With Access to Minio
|
||||
|
||||
#### Env Vars for Minio
|
||||
|
||||
Minio takes these env vars to connect to the server:
|
||||
|
||||
- `IDF_S3_SERVER`
|
||||
- `IDF_S3_ACCESS_KEY`
|
||||
- `IDF_S3_SECRET_KEY`
|
||||
- `IDF_S3_BUCKET`
|
||||
|
||||
#### Artifacts Types and File Patterns
|
||||
|
||||
The artifacts types and corresponding file patterns are defined in tools/ci/artifacts_handler.py, inside `ArtifactType` and `TYPE_PATTERNS_DICT`.
|
||||
|
||||
#### Upload
|
||||
|
||||
```shell
|
||||
python tools/ci/artifacts_handler.py upload
|
||||
```
|
||||
|
||||
will upload the files that match the file patterns to minio object storage with name:
|
||||
|
||||
`<pipeline_id>/<artifact_type>/<job_name>/<job_id>.zip`
|
||||
|
||||
For example, job 39043328 will upload these four files:
|
||||
|
||||
- `575500/map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
|
||||
- `575500/build_dir_without_map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
|
||||
- `575500/logs/build_pytest_examples_esp32/39043328.zip`
|
||||
- `575500/size_reports/build_pytest_examples_esp32/39043328.zip`
|
||||
|
||||
#### Download
|
||||
|
||||
You may run
|
||||
|
||||
```shell
|
||||
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id>
|
||||
```
|
||||
|
||||
to download all files of the pipeline, or
|
||||
|
||||
```shell
|
||||
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id> --job_name <job_name_or_pattern>
|
||||
```
|
||||
|
||||
to download all files with the specified job name or pattern, or
|
||||
|
||||
```shell
|
||||
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id> --job_name <job_name_or_pattern> --type <artifact_type> <artifact_type> ...
|
||||
```
|
||||
|
||||
to download all files with the specified job name or pattern and artifact type(s).
|
||||
|
||||
You may check all detailed documentation with `python tools/ci/artifacts_handler.py download -h`
|
||||
|
||||
@@ -1,28 +1,361 @@
|
||||
.build_template:
|
||||
stage: build
|
||||
extends:
|
||||
- .before_script:build
|
||||
- .after_script:build
|
||||
- .after_script:build:ccache
|
||||
image: $ESP_ENV_IMAGE
|
||||
tags: [build, shiny]
|
||||
tags:
|
||||
- build
|
||||
# build only on shiny servers since shiny storage server is at the same location
|
||||
- shiny
|
||||
variables:
|
||||
# Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
|
||||
IDF_CCACHE_ENABLE: "1"
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
dependencies: []
|
||||
|
||||
.build_cmake_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .before_script:build
|
||||
- .after_script:build:ccache
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
- pipeline_variables
|
||||
artifacts:
|
||||
paths:
|
||||
# The other artifacts patterns are defined under tools/ci/artifacts_handler.py
|
||||
# Now we're uploading/downloading the binary files from our internal storage server
|
||||
#
|
||||
# keep the log file to help debug
|
||||
- "**/build*/build_log.txt"
|
||||
# keep the size info to help track the binary size
|
||||
- size_info.txt
|
||||
- "**/build*/size.json"
|
||||
when: always
|
||||
expire_in: 4 days
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
--copy-sdkconfig
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--extra-preserve-dirs
|
||||
examples/bluetooth/esp_ble_mesh/ble_mesh_console
|
||||
examples/bluetooth/hci/controller_hci_uart_esp32
|
||||
examples/wifi/iperf
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
# for detailed documents, please refer to .gitlab/ci/README.md#uploaddownload-artifacts-to-internal-minio-server
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_cmake_clang_template:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
variables:
|
||||
IDF_TOOLCHAIN: clang
|
||||
TEST_BUILD_OPTS_EXTRA: ""
|
||||
TEST_DIR: tools/test_apps/system/cxx_pthread_bluetooth
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
--copy-sdkconfig
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
$TEST_BUILD_OPTS_EXTRA
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_pytest_template:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
-m \"not host_test\"
|
||||
--pytest-apps
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_pytest_no_jtag_template:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
-m \"not host_test and not jtag\"
|
||||
--pytest-apps
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_pytest_jtag_template:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
-m \"not host_test and jtag\"
|
||||
--pytest-apps
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
build_pytest_examples_esp32:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .rules:build:example_test-esp32
|
||||
parallel: 6
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_esp32s2:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .rules:build:example_test-esp32s2
|
||||
parallel: 3
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_esp32s3:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .rules:build:example_test-esp32s3
|
||||
parallel: 4
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_esp32c3:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .rules:build:example_test-esp32c3
|
||||
parallel: 4
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_esp32c2:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .rules:build:example_test-esp32c2
|
||||
parallel: 2
|
||||
variables:
|
||||
IDF_TARGET: esp32c2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_esp32c6:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .rules:build:example_test-esp32c6
|
||||
parallel: 2
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_esp32h2:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .rules:build:example_test-esp32h2
|
||||
parallel: 2
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_jtag: # for all targets
|
||||
extends:
|
||||
- .build_pytest_jtag_template
|
||||
- .rules:build:example_test
|
||||
variables:
|
||||
IDF_TARGET: all
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_components_esp32:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:component_ut-esp32
|
||||
parallel: 5
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
TEST_DIR: components
|
||||
|
||||
build_pytest_components_esp32s2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:component_ut-esp32s2
|
||||
parallel: 4
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
TEST_DIR: components
|
||||
|
||||
build_pytest_components_esp32s3:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:component_ut-esp32s3
|
||||
parallel: 4
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
TEST_DIR: components
|
||||
|
||||
build_pytest_components_esp32c3:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:component_ut-esp32c3
|
||||
parallel: 4
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
TEST_DIR: components
|
||||
|
||||
build_pytest_components_esp32c2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:component_ut-esp32c2
|
||||
parallel: 3
|
||||
variables:
|
||||
IDF_TARGET: esp32c2
|
||||
TEST_DIR: components
|
||||
|
||||
build_pytest_components_esp32c6:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:component_ut-esp32c6
|
||||
parallel: 3
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
TEST_DIR: components
|
||||
|
||||
build_pytest_components_esp32h2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:component_ut-esp32h2
|
||||
parallel: 4
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
TEST_DIR: components
|
||||
|
||||
build_only_components_apps:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:component_ut
|
||||
parallel: 5
|
||||
script:
|
||||
- set_component_ut_vars
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $COMPONENT_UT_DIRS -v
|
||||
-t all
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
build_pytest_test_apps_esp32:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:custom_test-esp32
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
TEST_DIR: tools/test_apps
|
||||
|
||||
build_pytest_test_apps_esp32s2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:custom_test-esp32s2
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
TEST_DIR: tools/test_apps
|
||||
|
||||
build_pytest_test_apps_esp32s3:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:custom_test-esp32s3
|
||||
parallel: 2
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
TEST_DIR: tools/test_apps
|
||||
|
||||
build_pytest_test_apps_esp32c3:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:custom_test-esp32c3
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
TEST_DIR: tools/test_apps
|
||||
|
||||
build_pytest_test_apps_esp32c2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:custom_test-esp32c2
|
||||
variables:
|
||||
IDF_TARGET: esp32c2
|
||||
TEST_DIR: tools/test_apps
|
||||
|
||||
build_pytest_test_apps_esp32c6:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:custom_test-esp32c6
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
TEST_DIR: tools/test_apps
|
||||
|
||||
build_pytest_test_apps_esp32h2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:custom_test-esp32h2
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
TEST_DIR: tools/test_apps
|
||||
|
||||
build_only_tools_test_apps:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:custom_test
|
||||
parallel: 9
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py tools/test_apps -v
|
||||
-t all
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
######################
|
||||
# build_template_app #
|
||||
######################
|
||||
.build_template_app_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .before_script:build
|
||||
variables:
|
||||
LOG_PATH: "${CI_PROJECT_DIR}/log_template_app"
|
||||
BUILD_PATH: "${CI_PROJECT_DIR}/build_template_app"
|
||||
BUILD_DIR: "${BUILD_PATH}/@t/@w"
|
||||
BUILD_LOG_CMAKE: "${LOG_PATH}/cmake_@t_@w.txt"
|
||||
BUILD_COMMAND_ARGS: ""
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- log_template_app/*
|
||||
- size_info.txt
|
||||
- build_template_app/**/size*.json
|
||||
- build_template_app/**/size.json
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
script:
|
||||
# Set the variable for 'esp-idf-template' testing
|
||||
- ESP_IDF_TEMPLATE_GIT=${ESP_IDF_TEMPLATE_GIT:-"https://github.com/espressif/esp-idf-template.git"}
|
||||
@@ -34,15 +367,212 @@
|
||||
# Only do the default cmake build for each target, remaining part are done in the build_template_app job
|
||||
- tools/ci/build_template_app.sh ${BUILD_COMMAND_ARGS}
|
||||
|
||||
# build-related-pre-check-jobs ------------------------------------------------
|
||||
# Build at least one project for each target at earliest stage to reduce build cost for obvious failing commits
|
||||
fast_template_app:
|
||||
extends:
|
||||
- .build_template_app_template
|
||||
- .rules:build
|
||||
- .rules:build:target_test
|
||||
stage: pre_check
|
||||
tags: [fast_run, shiny]
|
||||
variables:
|
||||
BUILD_COMMAND_ARGS: "-p"
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
build_examples_cmake_esp32:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32
|
||||
parallel: 8
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
TEST_DIR: examples
|
||||
|
||||
build_examples_cmake_esp32s2:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32s2
|
||||
parallel: 7
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_examples_cmake_esp32s3:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32s3
|
||||
parallel: 11
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
TEST_DIR: examples
|
||||
|
||||
build_examples_cmake_esp32c2:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32c2
|
||||
parallel: 7
|
||||
variables:
|
||||
IDF_TARGET: esp32c2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_examples_cmake_esp32c3:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32c3
|
||||
parallel: 9
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
TEST_DIR: examples
|
||||
|
||||
build_examples_cmake_esp32c6:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32c6
|
||||
parallel: 11
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
TEST_DIR: examples
|
||||
|
||||
build_examples_cmake_esp32h2:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32h2
|
||||
parallel: 9
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_examples_cmake_esp32p4:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32p4
|
||||
parallel: 4
|
||||
variables:
|
||||
IDF_TARGET: esp32p4
|
||||
TEST_DIR: examples
|
||||
|
||||
build_clang_test_apps_esp32:
|
||||
extends:
|
||||
- .build_cmake_clang_template
|
||||
- .rules:build:custom_test-esp32
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
|
||||
build_clang_test_apps_esp32s2:
|
||||
extends:
|
||||
- .build_cmake_clang_template
|
||||
- .rules:build:custom_test-esp32s2
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
|
||||
build_clang_test_apps_esp32s3:
|
||||
extends:
|
||||
- .build_cmake_clang_template
|
||||
- .rules:build:custom_test-esp32s3
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
|
||||
.build_clang_test_apps_riscv:
|
||||
extends:
|
||||
- .build_cmake_clang_template
|
||||
variables:
|
||||
# For RISCV clang generates '.linker-options' sections of type 'llvm_linker_options' in asm files.
|
||||
# See (https://llvm.org/docs/Extensions.html#linker-options-section-linker-options).
|
||||
# Binutils gas ignores them with warning.
|
||||
# TODO: LLVM-112, Use integrated assembler.
|
||||
TEST_BUILD_OPTS_EXTRA: "--ignore-warning-str 'Warning: unrecognized section type'"
|
||||
|
||||
build_clang_test_apps_esp32c3:
|
||||
extends:
|
||||
- .build_clang_test_apps_riscv
|
||||
- .rules:build:custom_test-esp32c3
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
|
||||
build_clang_test_apps_esp32c2:
|
||||
extends:
|
||||
- .build_clang_test_apps_riscv
|
||||
- .rules:build:custom_test-esp32c2
|
||||
variables:
|
||||
IDF_TARGET: esp32c2
|
||||
|
||||
build_clang_test_apps_esp32c6:
|
||||
extends:
|
||||
- .build_clang_test_apps_riscv
|
||||
- .rules:build:custom_test-esp32c6
|
||||
# TODO: c6 builds fail in master due to missing headers
|
||||
allow_failure: true
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
|
||||
.test_build_system_template:
|
||||
stage: host_test
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build:check
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
script:
|
||||
- ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
|
||||
- cd ${IDF_PATH}/tools/test_build_system
|
||||
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
|
||||
- pytest --parallel-count ${CI_NODE_TOTAL:-1} --parallel-index ${CI_NODE_INDEX:-1}
|
||||
--work-dir ${CI_PROJECT_DIR}/test_build_system --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml
|
||||
--ignore-result-files known_failure_cases/known_failure_cases.txt
|
||||
|
||||
pytest_build_system:
|
||||
extends: .test_build_system_template
|
||||
parallel: 3
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- test_build_system
|
||||
when: always
|
||||
expire_in: 2 days
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
|
||||
pytest_build_system_macos:
|
||||
extends:
|
||||
- .test_build_system_template
|
||||
- .before_script:build:macos
|
||||
- .rules:build:macos
|
||||
tags:
|
||||
- macos_shell
|
||||
parallel: 3
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- test_build_system
|
||||
when: always
|
||||
expire_in: 2 days
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
|
||||
build_docker:
|
||||
extends:
|
||||
- .before_script:minimal
|
||||
- .rules:build:docker
|
||||
stage: host_test
|
||||
needs: []
|
||||
image: espressif/docker-builder:1
|
||||
tags:
|
||||
- build_docker_amd64_brno
|
||||
variables:
|
||||
DOCKER_TMP_IMAGE_NAME: "idf_tmp_image"
|
||||
script:
|
||||
- export LOCAL_CI_REPOSITORY_URL=$CI_REPOSITORY_URL
|
||||
- if [ -n "$LOCAL_GITLAB_HTTPS_HOST" ]; then export LOCAL_CI_REPOSITORY_URL="https://gitlab-ci-token:${CI_JOB_TOKEN}@${LOCAL_GITLAB_HTTPS_HOST}/${CI_PROJECT_PATH}"; fi
|
||||
- if [ -n "$LOCAL_GIT_MIRROR" ]; then export LOCAL_CI_REPOSITORY_URL="${LOCAL_GIT_MIRROR}/${CI_PROJECT_PATH}"; fi
|
||||
- echo "Using repository at $LOCAL_CI_REPOSITORY_URL"
|
||||
- export DOCKER_BUILD_ARGS="--build-arg IDF_CLONE_URL=${LOCAL_CI_REPOSITORY_URL} --build-arg IDF_CLONE_BRANCH_OR_TAG=${CI_COMMIT_REF_NAME} --build-arg IDF_CHECKOUT_REF=${CI_COMMIT_TAG:-$PIPELINE_COMMIT_SHA}"
|
||||
# Build
|
||||
- docker build --tag ${DOCKER_TMP_IMAGE_NAME} ${DOCKER_BUILD_ARGS} tools/docker/
|
||||
# We can't mount $PWD/examples/get-started/blink into the container, see https://gitlab.com/gitlab-org/gitlab-ce/issues/41227.
|
||||
# The workaround mentioned there works, but leaves around directories which need to be cleaned up manually.
|
||||
# Therefore, build a copy of the example located inside the container.
|
||||
- docker run --rm --workdir /opt/esp/idf/examples/get-started/blink ${DOCKER_TMP_IMAGE_NAME} idf.py build
|
||||
|
||||
# This job builds template app with permutations of targets and optimization levels
|
||||
build_template_app:
|
||||
@@ -53,125 +583,3 @@ build_template_app:
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
|
||||
########################################
|
||||
# Clang Build Apps Without Tests Cases #
|
||||
########################################
|
||||
.build_cmake_clang_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
- pipeline_variables
|
||||
artifacts:
|
||||
paths:
|
||||
# The other artifacts patterns are defined under .idf_ci.toml
|
||||
# Now we're uploading/downloading the binary files from our internal storage server
|
||||
#
|
||||
# keep the log file to help debug
|
||||
- "**/build*/build_log.txt"
|
||||
# keep the size info to help track the binary size
|
||||
- size_info.txt
|
||||
- "**/build*/size*.json"
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
variables:
|
||||
IDF_TOOLCHAIN: clang
|
||||
TEST_BUILD_OPTS_EXTRA: ""
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
- run_cmd idf-build-apps build
|
||||
-p tools/test_apps/system/clang_build_test
|
||||
-t $IDF_TARGET
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--modified-components ${MR_MODIFIED_COMPONENTS}
|
||||
--modified-files ${MR_MODIFIED_FILES}
|
||||
$TEST_BUILD_OPTS_EXTRA
|
||||
|
||||
build_clang_test_apps_xtensa:
|
||||
extends: .build_cmake_clang_template
|
||||
parallel:
|
||||
matrix:
|
||||
- IDF_TARGET: [esp32, esp32s2, esp32s3]
|
||||
|
||||
build_clang_test_apps_riscv:
|
||||
extends: .build_cmake_clang_template
|
||||
variables:
|
||||
# https://reviews.llvm.org/D90108.
|
||||
# GNU 'as' lets .weak override .globl since binutils-gdb
|
||||
# https://github.com/bminor/binutils-gdb/commit/5ca547dc2399a0a5d9f20626d4bf5547c3ccfddd (1996)
|
||||
# while MC lets the last directive win (PR38921).
|
||||
# For RISCV chips we use integrated assembler by default, so suppress this warning to pass CI pipeline.
|
||||
TEST_BUILD_OPTS_EXTRA: "--ignore-warning-str 'changed binding to STB_WEAK'"
|
||||
parallel:
|
||||
matrix:
|
||||
- IDF_TARGET: [esp32c3, esp32c2, esp32c6, esp32c5, esp32h2, esp32p4]
|
||||
|
||||
####################
|
||||
# Dynamic Pipeline #
|
||||
####################
|
||||
generate_build_child_pipeline:
|
||||
extends:
|
||||
- .build_template
|
||||
tags: [fast_run, shiny]
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- pipeline_variables
|
||||
- job: baseline_manifest_sha
|
||||
optional: true
|
||||
artifacts:
|
||||
paths:
|
||||
- build_child_pipeline.yml
|
||||
- test_related_apps.txt
|
||||
- non_test_related_apps.txt
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
script:
|
||||
- run_cmd idf-ci --debug gitlab build-child-pipeline
|
||||
-p components
|
||||
-p examples
|
||||
-p tools/test_apps
|
||||
--modified-files $MR_MODIFIED_FILES
|
||||
|
||||
build_child_pipeline:
|
||||
stage: build
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
optional: true
|
||||
artifacts: false
|
||||
- pipeline_variables
|
||||
- generate_build_child_pipeline
|
||||
variables:
|
||||
MR_MODIFIED_COMPONENTS: $MR_MODIFIED_COMPONENTS
|
||||
MR_MODIFIED_FILES: $MR_MODIFIED_FILES
|
||||
PARENT_PIPELINE_ID: $CI_PIPELINE_ID
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/issues/214340
|
||||
inherit:
|
||||
variables: false
|
||||
trigger:
|
||||
include:
|
||||
- artifact: build_child_pipeline.yml
|
||||
job: generate_build_child_pipeline
|
||||
strategy: depend
|
||||
|
||||
generate_disabled_apps_report:
|
||||
extends:
|
||||
- .build_template
|
||||
tags: [fast_run, shiny]
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- pipeline_variables
|
||||
- job: baseline_manifest_sha
|
||||
optional: true
|
||||
artifacts:
|
||||
paths:
|
||||
- disabled_report.html
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
script:
|
||||
- pip install dominate idf-build-apps
|
||||
- run_cmd python tools/ci/gen_disabled_report.py --output disabled_report.html --verbose --enable-preview-targets
|
||||
- echo "Report generated at https://${CI_PAGES_HOSTNAME}:${CI_SERVER_PORT}/-/esp-idf/-/jobs/${CI_JOB_ID}/artifacts/disabled_report.html"
|
||||
|
||||
@@ -6,9 +6,9 @@ stages:
|
||||
- pre_check
|
||||
- build
|
||||
- assign_test
|
||||
- build_doc
|
||||
- target_test
|
||||
- host_test
|
||||
- build_doc
|
||||
- test_deploy
|
||||
- deploy
|
||||
- post_deploy
|
||||
@@ -20,23 +20,20 @@ variables:
|
||||
MAKEFLAGS: "-j5 --no-keep-going"
|
||||
|
||||
# GitLab-CI environment
|
||||
# Thanks to pack-objects cache, clone strategy should behave faster than fetch
|
||||
# so we pick "clone" as default git strategy
|
||||
# Shiny runners by default remove the CI_PROJECT_DIR every time at the beginning of one job
|
||||
# and clone with a --depth=1
|
||||
# Brew runners will fetch from locally mirror first, and cache the local CI_PROJECT_DIR
|
||||
# In conclusion
|
||||
# - set GIT_STRATEGY: "clone" to shiny runners
|
||||
# - set GIT_STRATEGY: "fetch" to brew runners
|
||||
|
||||
# now we have pack-objects cache, so clone strategy is faster than fetch
|
||||
GIT_STRATEGY: clone
|
||||
GIT_DEPTH: 1
|
||||
GIT_SUBMODULE_STRATEGY: none # here we use cache for submodules, so we don't need to fetch them every time
|
||||
# we will download archive for each submodule instead of clone.
|
||||
# we don't do "recursive" when fetch submodule as they're not used in CI now.
|
||||
GIT_SUBMODULE_STRATEGY: none
|
||||
# since we're using merged-result pipelines, the last commit should work for most cases
|
||||
GIT_DEPTH: 1
|
||||
# --no-recurse-submodules: we use cache for submodules
|
||||
# --prune --prune-tags: in case remote branch or tag is force pushed
|
||||
GIT_FETCH_EXTRA_FLAGS: "--no-recurse-submodules --prune --prune-tags"
|
||||
# we're using .cache folder for caches
|
||||
GIT_CLEAN_FLAGS: -ffdx -e .cache/
|
||||
LATEST_GIT_TAG: v6.1-dev
|
||||
LATEST_GIT_TAG: v5.2-dev
|
||||
|
||||
SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
|
||||
# by default we will fetch all submodules
|
||||
@@ -49,12 +46,19 @@ variables:
|
||||
IDF_PATH: "$CI_PROJECT_DIR"
|
||||
V: "0"
|
||||
CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
|
||||
PYTHON_VER: 3.8.17
|
||||
|
||||
# Docker images
|
||||
ESP_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-env-v6.1:1"
|
||||
ESP_IDF_DOC_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-doc-env-v6.1:1-1"
|
||||
TARGET_TEST_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/target-test-env-v6.1:1"
|
||||
ESP_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-env-v5.2:2"
|
||||
ESP_IDF_DOC_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-doc-env-v5.2:2-1"
|
||||
QEMU_IMAGE: "${CI_DOCKER_REGISTRY}/qemu-v5.2:2-20230522"
|
||||
TARGET_TEST_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/target-test-env-v5.2:2"
|
||||
|
||||
SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:5"
|
||||
PRE_COMMIT_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-pre-commit:1"
|
||||
|
||||
# target test repo parameters
|
||||
TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
|
||||
|
||||
# cache python dependencies
|
||||
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
|
||||
@@ -65,7 +69,7 @@ variables:
|
||||
CI_PYTHON_CONSTRAINT_BRANCH: ""
|
||||
|
||||
# Update the filename for a specific ESP-IDF release. It is used only with CI_PYTHON_CONSTRAINT_BRANCH.
|
||||
CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v6.1.txt"
|
||||
CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v5.2.txt"
|
||||
|
||||
# Set this variable to repository name of a Python tool you wish to install and test in the context of ESP-IDF CI.
|
||||
# Keep the variable empty when not used.
|
||||
@@ -76,34 +80,13 @@ variables:
|
||||
# This is used only if CI_PYTHON_TOOL_REPO is not empty.
|
||||
CI_PYTHON_TOOL_BRANCH: ""
|
||||
|
||||
# Set this variable to Clang toolchain distro URL to be used.
|
||||
# NOTE: We have separate toolchains for Xtensa and RISCV, therefore jobs for one arch will fail.
|
||||
# This is OK as far as we use CI_CLANG_DISTRO_URL for pre-release tests purposes only.
|
||||
# Keep the variable empty when not used.
|
||||
CI_CLANG_DISTRO_URL: ""
|
||||
|
||||
# Set this variable to specify the file name for the known failure cases.
|
||||
KNOWN_FAILURE_CASES_FILE_NAME: "master.txt"
|
||||
|
||||
IDF_CI_BUILD: 1
|
||||
|
||||
# ccache settings
|
||||
# some settings need to set in .gitlab-ci.yml as it takes effect while start-up the job
|
||||
# https://ccache.dev/manual/latest.html#_configuring_ccache
|
||||
|
||||
# host mapping volume to share ccache between runner concurrent jobs
|
||||
CCACHE_DIR: "/cache/idf_ccache"
|
||||
CCACHE_MAXSIZE: "50G"
|
||||
|
||||
FF_USE_NEW_BASH_EVAL_STRATEGY: "true"
|
||||
FORCE_COLOR: "1" # rich print with color
|
||||
|
||||
################################################
|
||||
# `before_script` and `after_script` Templates #
|
||||
################################################
|
||||
.common_before_scripts: &common-before_scripts |
|
||||
source tools/ci/utils.sh
|
||||
|
||||
is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
|
||||
|
||||
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
|
||||
@@ -119,7 +102,7 @@ variables:
|
||||
source tools/ci/configure_ci_environment.sh
|
||||
|
||||
# add extra python packages
|
||||
export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/ci:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
|
||||
export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
|
||||
|
||||
.setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
|
||||
# must use after setup_tools_except_target_test
|
||||
@@ -127,7 +110,7 @@ variables:
|
||||
|
||||
# download constraint file for dev
|
||||
if [[ -n "$CI_PYTHON_CONSTRAINT_BRANCH" ]]; then
|
||||
wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" "${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}"
|
||||
wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" ${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}
|
||||
mkdir -p ~/.espressif
|
||||
mv /tmp/constraint.txt ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
|
||||
fi
|
||||
@@ -136,67 +119,36 @@ variables:
|
||||
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
|
||||
export IDF_MIRROR_PREFIX_MAP=
|
||||
fi
|
||||
# Optimize pip install
|
||||
if echo "${CI_RUNNER_TAGS}" | grep "shiny"; then
|
||||
export PIP_INDEX_URL="${PIP_INDEX_URL_SHINY}"
|
||||
fi
|
||||
if [[ "$(uname -m)" == "x86_64" ]] || [[ "$(uname -m)" == "aarch64" ]]; then
|
||||
export IDF_PIP_WHEELS_URL=""
|
||||
fi
|
||||
|
||||
# install.sh
|
||||
if [[ "${CI_JOB_STAGE}" != "target_test" ]]; then
|
||||
section_start "running_install_sh" "Running install.sh"
|
||||
if [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
|
||||
run_cmd bash install.sh --enable-ci --enable-docs
|
||||
else
|
||||
run_cmd bash install.sh --enable-ci
|
||||
fi
|
||||
section_end "running_install_sh"
|
||||
else
|
||||
section_start "install_python_env" "Install Python environment, skip required tools check"
|
||||
run_cmd python tools/idf_tools.py install-python-env --features ci,test-specific
|
||||
export IDF_SKIP_TOOLS_CHECK=1
|
||||
section_end "install_python_env"
|
||||
fi
|
||||
|
||||
section_start "source_export" "Source export.sh"
|
||||
source ./export.sh
|
||||
section_end "source_export"
|
||||
|
||||
# Eager upgrade of CI dependencies
|
||||
# Done after sourcing export.sh so that we could easily invoke the right pip
|
||||
section_start "upgrade_ci_dependencies" "Upgrading CI dependencies"
|
||||
pip install --upgrade --upgrade-strategy=eager -r $IDF_PATH/tools/requirements/requirements.ci.txt -c ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
|
||||
# install latest python packages
|
||||
# target test jobs
|
||||
if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
|
||||
pip install --upgrade --upgrade-strategy=eager -r $IDF_PATH/tools/requirements/requirements.test-specific.txt -c ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
|
||||
fi
|
||||
section_end "upgrade_ci_dependencies"
|
||||
|
||||
REEXPORT_NEEDED=0
|
||||
if [[ ! -z "$INSTALL_EXTRA_TOOLS" ]]; then
|
||||
section_start "installing_optional_tools" "Install optional tools ${INSTALL_EXTRA_TOOLS}"
|
||||
run_cmd $IDF_PATH/tools/idf_tools.py --non-interactive install $INSTALL_EXTRA_TOOLS
|
||||
section_end "installing_optional_tools"
|
||||
|
||||
REEXPORT_NEEDED=1
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
|
||||
run_cmd bash install.sh --enable-ci --enable-docs
|
||||
elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
else
|
||||
if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
|
||||
run_cmd bash install.sh --enable-ci
|
||||
else
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
fi
|
||||
fi
|
||||
|
||||
# Install esp-clang if necessary (esp-clang is separately installed)
|
||||
if [[ "$IDF_TOOLCHAIN" == "clang" && -z "$CI_CLANG_DISTRO_URL" ]]; then
|
||||
# Install esp-clang if necessary
|
||||
if [[ "$IDF_TOOLCHAIN" == "clang" ]]; then
|
||||
$IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
|
||||
|
||||
REEXPORT_NEEDED=1
|
||||
fi
|
||||
|
||||
if [[ $REEXPORT_NEEDED -eq 1 ]]; then
|
||||
section_start "re_source_export" "Re-source export.sh"
|
||||
source ./export.sh
|
||||
section_end "re_source_export"
|
||||
fi
|
||||
# Since the version 3.21 CMake passes source files and include dirs to ninja using absolute paths.
|
||||
# Needed for pytest junit reports.
|
||||
$IDF_PATH/tools/idf_tools.py --non-interactive install cmake
|
||||
|
||||
# Custom clang toolchain
|
||||
if [[ "$IDF_TOOLCHAIN" == "clang" && ! -z "$CI_CLANG_DISTRO_URL" ]]; then
|
||||
source ./export.sh
|
||||
|
||||
# Custom clang
|
||||
if [[ ! -z "$CI_CLANG_DISTRO_URL" ]]; then
|
||||
echo "Using custom clang from ${CI_CLANG_DISTRO_URL}"
|
||||
wget $CI_CLANG_DISTRO_URL
|
||||
ARCH_NAME=$(basename $CI_CLANG_DISTRO_URL)
|
||||
@@ -205,21 +157,13 @@ variables:
|
||||
fi
|
||||
|
||||
# Custom OpenOCD
|
||||
if [[ "$CI_JOB_STAGE" == "target_test" ]]; then
|
||||
machine="$(uname -m)"
|
||||
if [[ "$machine" == "armv7l" ]] ; then
|
||||
OOCD_DISTRO_URL="$OOCD_DISTRO_URL_ARMHF"
|
||||
elif [[ "$machine" == "aarch64" ]] ; then
|
||||
OOCD_DISTRO_URL="$OOCD_DISTRO_URL_ARM64"
|
||||
fi
|
||||
if [[ ! -z "$OOCD_DISTRO_URL" ]]; then
|
||||
echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
|
||||
wget $OOCD_DISTRO_URL
|
||||
ARCH_NAME=$(basename $OOCD_DISTRO_URL)
|
||||
tar -x -f $ARCH_NAME
|
||||
export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
|
||||
export PATH=$PWD/openocd-esp32/bin:$PATH
|
||||
fi
|
||||
if [[ ! -z "$OOCD_DISTRO_URL" && "$CI_JOB_STAGE" == "target_test" ]]; then
|
||||
echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
|
||||
wget $OOCD_DISTRO_URL
|
||||
ARCH_NAME=$(basename $OOCD_DISTRO_URL)
|
||||
tar -x -f $ARCH_NAME
|
||||
export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
|
||||
export PATH=$PWD/openocd-esp32/bin:$PATH
|
||||
fi
|
||||
|
||||
if [[ -n "$CI_PYTHON_TOOL_REPO" ]]; then
|
||||
@@ -228,23 +172,29 @@ variables:
|
||||
rm -rf ${CI_PYTHON_TOOL_REPO}
|
||||
fi
|
||||
|
||||
info "setup tools and python venv done"
|
||||
|
||||
.show_ccache_statistics: &show_ccache_statistics |
|
||||
# Show ccache statistics if enabled globally
|
||||
section_start "ccache_show_stats" "Show ccache statistics"
|
||||
test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats -vv || true
|
||||
section_end "ccache_show_stats"
|
||||
test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
|
||||
|
||||
.upload_failed_job_log_artifacts: &upload_failed_job_log_artifacts |
|
||||
if [ $CI_JOB_STATUS = "failed" ]; then
|
||||
run_cmd idf-ci gitlab upload-artifacts --type log
|
||||
python tools/ci/artifacts_handler.py upload --type logs
|
||||
fi
|
||||
|
||||
.before_script:minimal:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
|
||||
.before_script:build:macos:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
# On macOS, these tools need to be installed
|
||||
- export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
|
||||
- $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
|
||||
# This adds tools (compilers) and the version-specific Python environment to PATH
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- fetch_submodules
|
||||
|
||||
.before_script:build:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
@@ -254,9 +204,8 @@ variables:
|
||||
- export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
|
||||
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
|
||||
|
||||
.after_script:build:
|
||||
.after_script:build:ccache:
|
||||
after_script:
|
||||
- source tools/ci/utils.sh
|
||||
- *show_ccache_statistics
|
||||
- *upload_failed_job_log_artifacts
|
||||
|
||||
@@ -294,9 +243,9 @@ variables:
|
||||
git remote add origin "${CI_REPOSITORY_URL}"
|
||||
fi
|
||||
|
||||
.git_checkout_ci_commit_sha: &git_checkout_ci_commit_sha |
|
||||
git checkout $CI_COMMIT_SHA
|
||||
eval "git clean ${GIT_CLEAN_FLAGS}"
|
||||
.git_checkout_fetch_head: &git_checkout_fetch_head |
|
||||
git checkout FETCH_HEAD
|
||||
git clean ${GIT_CLEAN_FLAGS}
|
||||
|
||||
# git diff requires two commits, with different CI env var
|
||||
#
|
||||
@@ -314,35 +263,43 @@ variables:
|
||||
- *git_init
|
||||
- *git_fetch_from_mirror_url_if_exists
|
||||
- |
|
||||
# Store the diff output in a temporary file
|
||||
TEMP_FILE=$(mktemp)
|
||||
# merged results pipelines, by default
|
||||
if [[ -n $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA ]]; then
|
||||
git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_SHA
|
||||
git fetch origin $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
|
||||
|
||||
git diff --name-only $CI_MERGE_REQUEST_TARGET_BRANCH_SHA...$CI_MERGE_REQUEST_SOURCE_BRANCH_SHA > "$TEMP_FILE"
|
||||
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
|
||||
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git fetch origin $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA)
|
||||
# merge request pipelines, when the mr got conflicts
|
||||
elif [[ -n $CI_MERGE_REQUEST_DIFF_BASE_SHA ]]; then
|
||||
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_COMMIT_SHA > "$TEMP_FILE"
|
||||
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
|
||||
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_COMMIT_SHA)
|
||||
# other pipelines, like the protected branches pipelines
|
||||
elif [[ "$CI_COMMIT_BEFORE_SHA" != "0000000000000000000000000000000000000000" ]]; then
|
||||
git fetch origin $CI_COMMIT_BEFORE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git diff --name-only $CI_COMMIT_BEFORE_SHA $CI_COMMIT_SHA > "$TEMP_FILE"
|
||||
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
|
||||
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_COMMIT_BEFORE_SHA $CI_COMMIT_SHA)
|
||||
else
|
||||
# pipeline source could be web, scheduler, etc.
|
||||
git fetch origin $CI_COMMIT_SHA --depth=2 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git diff --name-only $CI_COMMIT_SHA~1 $CI_COMMIT_SHA > "$TEMP_FILE"
|
||||
GIT_DIFF_OUTPUT=$(cat "$TEMP_FILE")
|
||||
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_COMMIT_SHA~1 $CI_COMMIT_SHA)
|
||||
fi
|
||||
- *git_checkout_ci_commit_sha
|
||||
- *git_checkout_fetch_head
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
|
||||
# git describe requires commit history until the latest tag
|
||||
.before_script:fetch:git_describe:
|
||||
variables:
|
||||
GIT_STRAEGY: none
|
||||
before_script:
|
||||
- *git_init
|
||||
- *git_fetch_from_mirror_url_if_exists
|
||||
- |
|
||||
git fetch origin refs/tags/"${LATEST_GIT_TAG}":refs/tags/"${LATEST_GIT_TAG}" --depth=1
|
||||
git repack -d
|
||||
git fetch origin $CI_COMMIT_SHA --shallow-since=$(git log -1 --format=%as "${LATEST_GIT_TAG}")
|
||||
- *git_checkout_fetch_head
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
@@ -355,48 +312,13 @@ variables:
|
||||
before_script:
|
||||
- *git_init
|
||||
- *git_fetch_from_mirror_url_if_exists
|
||||
- eval "git fetch --depth=1 ${GIT_FETCH_EXTRA_FLAGS} origin ${CI_COMMIT_SHA}"
|
||||
- *git_checkout_ci_commit_sha
|
||||
- git fetch origin "${CI_COMMIT_SHA}" --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
- *git_checkout_fetch_head
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
# no submodules
|
||||
|
||||
.brew-macos-settings:
|
||||
variables:
|
||||
GIT_STRATEGY: none # we do manual git clone to use local mirror
|
||||
IDF_CCACHE_ENABLE: "0"
|
||||
CCACHE_DIR: "/var/tmp/cache/idf_ccache"
|
||||
tags:
|
||||
- macos-tart
|
||||
image: macos-sequoia-idf-v6.1
|
||||
cache: [] # pip cache is created under amd64, and submodules are downloaded with brew mirror, so disable cache here
|
||||
before_script:
|
||||
# assert LOCAL_GIT_MIRROR is set
|
||||
- echo -e "section_start:`date +%s`:check_out\r\e[0Kchecking out from local git mirror, then reset to CI_COMMIT_SHA"
|
||||
- |
|
||||
if [ -z "${LOCAL_GIT_MIRROR:-}" ]; then
|
||||
echo "Error: LOCAL_GIT_MIRROR not set, cannot clone from mirror."
|
||||
exit 1
|
||||
fi
|
||||
- MIRROR_REPO_URL="${LOCAL_GIT_MIRROR}/${CI_PROJECT_PATH}"
|
||||
- cd "${CI_PROJECT_DIR}"
|
||||
# since .cache exists in CI_PROJECT_DIR, so can't direct `git clone .`
|
||||
- git clone -b ${CI_MERGE_REQUEST_TARGET_BRANCH_NAME:-${CI_COMMIT_BRANCH}} --depth=1 --recursive --shallow-submodules "${MIRROR_REPO_URL}" tmp
|
||||
- mv tmp/.git ./
|
||||
- rm -rf tmp
|
||||
- git reset --hard
|
||||
# set remote url back
|
||||
- git remote set-url origin "${CI_REPOSITORY_URL}"
|
||||
- eval "git fetch --depth=1 ${GIT_FETCH_EXTRA_FLAGS} origin ${CI_COMMIT_SHA}"
|
||||
- git checkout FETCH_HEAD
|
||||
- git submodule update --init --recursive --depth=1
|
||||
- echo -e "section_end:`date +%s`:check_out\r\e[0K"
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
after_script: [] # ccache now is disabled for macos brew runners
|
||||
timeout: 30m
|
||||
|
||||
#############
|
||||
# `default` #
|
||||
#############
|
||||
@@ -404,15 +326,11 @@ default:
|
||||
cache:
|
||||
# pull only for most of the use cases since it's cache dir.
|
||||
# Only set "push" policy for "upload_cache" stage jobs
|
||||
- key: pip-cache-${LATEST_GIT_TAG}
|
||||
fallback_keys:
|
||||
- pip-cache
|
||||
- key: pip-cache
|
||||
paths:
|
||||
- .cache/pip
|
||||
policy: pull
|
||||
- key: submodule-cache-${LATEST_GIT_TAG}
|
||||
fallback_keys:
|
||||
- submodule-cache
|
||||
- key: submodule-cache
|
||||
paths:
|
||||
- .cache/submodule_archives
|
||||
policy: pull
|
||||
@@ -421,12 +339,6 @@ default:
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
- fetch_submodules
|
||||
# gitlab bug, setting them here doesn't work
|
||||
# - expire_in: https://gitlab.com/gitlab-org/gitlab/-/issues/404563
|
||||
# - when: https://gitlab.com/gitlab-org/gitlab/-/issues/440672
|
||||
# artifacts:
|
||||
# expire_in: 1 week
|
||||
# when: always
|
||||
retry:
|
||||
max: 2
|
||||
when:
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
# External DangerJS
|
||||
include:
|
||||
- project: espressif/shared-ci-dangerjs
|
||||
ref: master
|
||||
file: danger.yaml
|
||||
|
||||
run-danger-mr-linter:
|
||||
stage: pre_check
|
||||
variables:
|
||||
GIT_STRATEGY: none # no repo checkout
|
||||
ENABLE_CHECK_AREA_LABELS: 'true'
|
||||
ENABLE_CHECK_DOCS_TRANSLATION: 'true'
|
||||
ENABLE_CHECK_UPDATED_CHANGELOG: 'false'
|
||||
before_script: []
|
||||
cache: []
|
||||
tags:
|
||||
- dangerjs
|
||||
@@ -2,18 +2,15 @@
|
||||
# - extra_default_build_targets:
|
||||
# besides of the SUPPORTED_TARGETS in IDF,
|
||||
# enable build for the specified targets by default as well.
|
||||
# !!! DEPRECATED: use `additional_build_targets` in .idf_build_apps.toml instead
|
||||
#
|
||||
# - bypass_check_test_targets:
|
||||
# suppress the check_build_test_rules check-test-script warnings for the specified targets
|
||||
#
|
||||
# This file should ONLY be used during bringup. Should be reset to empty after the bringup process
|
||||
extra_default_build_targets:
|
||||
- esp32p4
|
||||
|
||||
bypass_check_test_targets:
|
||||
- esp32h21
|
||||
- esp32h4
|
||||
- esp32c5
|
||||
|
||||
- esp32p4
|
||||
#
|
||||
# These lines would
|
||||
# - enable the README.md check for esp32c6. Don't forget to add the build jobs in .gitlab/ci/build.yml
|
||||
|
||||
@@ -1,3 +1,18 @@
|
||||
.all_targets: &all_targets
|
||||
- esp32
|
||||
- esp32s2
|
||||
- esp32s3
|
||||
- esp32c3
|
||||
- esp32c2
|
||||
- esp32c6
|
||||
- esp32h2
|
||||
- esp32p4
|
||||
|
||||
.target_test: &target_test
|
||||
- example_test
|
||||
- custom_test
|
||||
- component_ut
|
||||
|
||||
##############
|
||||
# Build Jobs #
|
||||
##############
|
||||
@@ -9,6 +24,7 @@
|
||||
- build_system
|
||||
- downloadable-tools
|
||||
included_in:
|
||||
- build:target_test
|
||||
- build:check
|
||||
|
||||
# -------------------
|
||||
@@ -26,13 +42,13 @@
|
||||
|
||||
"build:macos":
|
||||
labels:
|
||||
# - build
|
||||
- build
|
||||
- macos
|
||||
- macos_test # for backward compatibility
|
||||
# patterns:
|
||||
# - build_system
|
||||
# - build_macos
|
||||
# - downloadable-tools
|
||||
patterns:
|
||||
- build_system
|
||||
- build_macos
|
||||
- downloadable-tools
|
||||
|
||||
# ---------------------------
|
||||
# Add patterns to build rules
|
||||
@@ -40,6 +56,8 @@
|
||||
"patterns:template-app":
|
||||
patterns:
|
||||
- build_template-app
|
||||
included_in:
|
||||
- build:target_test
|
||||
|
||||
"patterns:build-check":
|
||||
patterns:
|
||||
@@ -47,6 +65,104 @@
|
||||
included_in:
|
||||
- build:check
|
||||
|
||||
# ---------------
|
||||
# Build Test Jobs
|
||||
# ---------------
|
||||
"build:{0}-{1}":
|
||||
matrix:
|
||||
- *target_test
|
||||
- *all_targets
|
||||
labels:
|
||||
- build
|
||||
patterns:
|
||||
- build_components
|
||||
- build_system
|
||||
- build_target_test
|
||||
- downloadable-tools
|
||||
included_in:
|
||||
- "build:{0}"
|
||||
- build:target_test
|
||||
|
||||
####################
|
||||
# Target Test Jobs #
|
||||
####################
|
||||
"test:{0}-{1}":
|
||||
matrix:
|
||||
- *target_test
|
||||
- *all_targets
|
||||
labels: # For each rule, use labels <test_type> and <test_type>-<target>
|
||||
- "{0}"
|
||||
- "{0}_{1}"
|
||||
- target_test
|
||||
patterns: # For each rule, use patterns <test_type> and build-<test_type>
|
||||
- "{0}"
|
||||
- "build-{0}"
|
||||
included_in: # Parent rules
|
||||
- "build:{0}"
|
||||
- "build:{0}-{1}"
|
||||
- build:target_test
|
||||
|
||||
# -------------
|
||||
# Special Cases
|
||||
# -------------
|
||||
|
||||
# To reduce the specific runners' usage.
|
||||
# Do not create these jobs by default patterns on development branches
|
||||
# Can be triggered by labels or related changes
|
||||
"test:{0}-{1}-{2}":
|
||||
matrix:
|
||||
- *target_test
|
||||
- *all_targets
|
||||
- - wifi # pytest*wifi*
|
||||
- ethernet # pytest*ethernet*
|
||||
- sdio # pytest*sdio*
|
||||
- usb # USB Device & Host tests
|
||||
- adc # pytest*adc*
|
||||
- i154
|
||||
- flash_multi
|
||||
- ecdsa
|
||||
- nvs_encr_hmac
|
||||
patterns:
|
||||
- "{0}-{1}-{2}"
|
||||
- "{0}-{2}"
|
||||
- "target_test-{2}"
|
||||
labels:
|
||||
- "{0}_{1}"
|
||||
- "{0}"
|
||||
- target_test
|
||||
included_in:
|
||||
- "build:{0}-{1}"
|
||||
- "build:{0}"
|
||||
- build:target_test
|
||||
|
||||
# For example_test*flash_encryption_wifi_high_traffic jobs
|
||||
# set `INCLUDE_NIGHTLY_RUN` variable when triggered on development branches
|
||||
"test:example_test-{0}-include_nightly_run-rule":
|
||||
matrix:
|
||||
- - esp32
|
||||
- esp32c3
|
||||
specific_rules:
|
||||
- "if-example_test-ota-include_nightly_run-rule"
|
||||
included_in:
|
||||
- "build:example_test-{0}"
|
||||
- "build:example_test"
|
||||
- build:target_test
|
||||
|
||||
# For i154 runners
|
||||
"test:example_test-i154":
|
||||
patterns:
|
||||
- "example_test-i154"
|
||||
- "target_test-i154"
|
||||
labels:
|
||||
- target_test
|
||||
- example_test
|
||||
included_in:
|
||||
- "build:example_test-esp32s3"
|
||||
- "build:example_test-esp32c6"
|
||||
- "build:example_test-esp32h2"
|
||||
- "build:example_test"
|
||||
- build:target_test
|
||||
|
||||
"test:host_test":
|
||||
labels:
|
||||
- host_test
|
||||
@@ -65,15 +181,3 @@
|
||||
"labels:nvs_coverage": # host_test
|
||||
labels:
|
||||
- nvs_coverage
|
||||
|
||||
"labels:windows_pytest_build_system":
|
||||
labels:
|
||||
- windows
|
||||
specific_rules:
|
||||
- if-schedule-test-build-system-windows
|
||||
patterns:
|
||||
- build_system_win
|
||||
|
||||
"labels:buildv2":
|
||||
labels:
|
||||
- buildv2
|
||||
|
||||
@@ -3,27 +3,7 @@
|
||||
image: $ESP_ENV_IMAGE
|
||||
tags: [ deploy ]
|
||||
|
||||
.metrics_template:
|
||||
stage: deploy
|
||||
tags: [ fast_run, shiny ]
|
||||
image: python:3.13-slim
|
||||
dependencies: []
|
||||
needs: []
|
||||
variables:
|
||||
PIP_CACHE_DIR: ".cache/pip"
|
||||
# Metrics - related env vars
|
||||
ESP_METRICS_PROJECT_URL: "$CI_PROJECT_URL"
|
||||
ESP_METRICS_PROJECT_ID: "$CI_PROJECT_ID"
|
||||
ESP_METRICS_COMMIT_SHA: "$PIPELINE_COMMIT_SHA"
|
||||
ESP_METRICS_BRANCH_NAME: "$CI_COMMIT_REF_NAME"
|
||||
cache:
|
||||
key: metrics-pip
|
||||
paths:
|
||||
- .cache/pip
|
||||
before_script:
|
||||
- echo "Installing esp-metrics-cli tool"
|
||||
- pip install "esp-metrics-cli>=0.3,<1"
|
||||
|
||||
# Check this before push_to_github
|
||||
check_submodule_sync:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
@@ -32,13 +12,13 @@ check_submodule_sync:
|
||||
tags: [ brew, github_sync ]
|
||||
retry: 2
|
||||
variables:
|
||||
GIT_STRATEGY: fetch # use brew local mirror first
|
||||
# for brew runners, we always set GIT_STRATEGY to fetch
|
||||
GIT_STRATEGY: fetch
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git"
|
||||
dependencies: []
|
||||
script:
|
||||
- git submodule deinit --force .
|
||||
- rm -rf .git/modules # remove all the cached metadata
|
||||
# setting the default remote URL to the public one, to resolve relative location URLs
|
||||
- git config remote.origin.url ${PUBLIC_IDF_URL}
|
||||
# check if all submodules are correctly synced to public repository
|
||||
@@ -51,65 +31,31 @@ push_to_github:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .before_script:minimal
|
||||
- .rules:protected:deploy
|
||||
- .rules:push_to_github
|
||||
needs:
|
||||
# submodule must be synced before pushing to github
|
||||
- check_submodule_sync
|
||||
tags: [ brew, github_sync ]
|
||||
variables:
|
||||
GIT_STRATEGY: fetch # use brew local mirror first
|
||||
GIT_DEPTH: 0 # github needs full record of commits
|
||||
# for brew runners, we always set GIT_STRATEGY to fetch
|
||||
GIT_STRATEGY: fetch
|
||||
# github also need full record of commits
|
||||
GIT_DEPTH: 0
|
||||
script:
|
||||
- add_github_ssh_keys
|
||||
- git remote remove github &>/dev/null || true
|
||||
- git remote add github git@github.com:espressif/esp-idf.git
|
||||
- tools/ci/push_to_github.sh
|
||||
environment:
|
||||
name: push_to_github_production
|
||||
deployment_tier: production
|
||||
url: "https://github.com/espressif/esp-idf"
|
||||
|
||||
deploy_update_SHA_in_esp-dockerfiles:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .before_script:minimal
|
||||
- .rules:protected:deploy
|
||||
- .rules:protected-no_label-always
|
||||
dependencies: []
|
||||
variables:
|
||||
GIT_DEPTH: 2
|
||||
tags: [build, shiny]
|
||||
tags: [ shiny, build ]
|
||||
script:
|
||||
- 'curl --header "PRIVATE-TOKEN: ${ESPCI_SCRIPTS_TOKEN}" -o create_MR_in_esp_dockerfile.sh $GITLAB_HTTP_SERVER/api/v4/projects/1260/repository/files/create_MR_in_esp_dockerfile%2Fcreate_MR_in_esp_dockerfile.sh/raw\?ref\=master'
|
||||
- chmod +x create_MR_in_esp_dockerfile.sh
|
||||
- ./create_MR_in_esp_dockerfile.sh
|
||||
environment:
|
||||
name: deploy_update_SHA_in_esp-dockerfiles_production
|
||||
deployment_tier: production
|
||||
|
||||
upload_junit_report:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
tags: [ fast_run, shiny ]
|
||||
needs:
|
||||
- pipeline_variables
|
||||
- job: build_child_pipeline
|
||||
artifacts: false
|
||||
script:
|
||||
- run_cmd idf-ci gitlab download-artifacts --type junit
|
||||
rules:
|
||||
- when: always
|
||||
artifacts:
|
||||
reports:
|
||||
junit: XUNIT_RESULT_*.xml
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
|
||||
target-examples-count-metrics:
|
||||
extends:
|
||||
- .metrics_template
|
||||
allow_failure: true
|
||||
script:
|
||||
- echo "Generating ESP-IDF examples count metrics"
|
||||
- cd tools/ci/metrics/examples_count
|
||||
- python3 generate_metrics.py
|
||||
- esp-metrics-cli upload -d schema.yaml -i metrics.json
|
||||
|
||||
@@ -4,11 +4,11 @@
|
||||
- "**/*.rst"
|
||||
- "CONTRIBUTING.rst"
|
||||
- "**/soc_caps.h"
|
||||
- "**/Kconfig*"
|
||||
|
||||
.patterns-docs-partial: &patterns-docs-partial
|
||||
- "components/**/*.h"
|
||||
- "components/**/CMakeLists.txt"
|
||||
- "components/**/Kconfig*"
|
||||
- "components/**/CMakeList.txt"
|
||||
- "components/**/sdkconfig*"
|
||||
- "tools/tools.json"
|
||||
- "tools/idf_tools.py"
|
||||
@@ -19,9 +19,12 @@
|
||||
.patterns-docs-preview: &patterns-docs-preview
|
||||
- "docs/**/*"
|
||||
|
||||
.if-protected-check: &if-protected-check
|
||||
.if-protected: &if-protected
|
||||
if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/)'
|
||||
|
||||
.if-protected-no_label: &if-protected-no_label
|
||||
if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/) && $BOT_TRIGGER_WITH_LABEL == null'
|
||||
|
||||
.if-qa-test-tag: &if-qa-test-tag
|
||||
if: '$CI_COMMIT_TAG =~ /^qa-test/'
|
||||
|
||||
@@ -34,16 +37,25 @@
|
||||
.if-dev-push: &if-dev-push
|
||||
if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_COMMIT_TAG !~ /^qa-test/ && ($CI_PIPELINE_SOURCE == "push" || $CI_PIPELINE_SOURCE == "merge_request_event")'
|
||||
|
||||
.if-schedule: &if-schedule
|
||||
if: '$CI_PIPELINE_SOURCE == "schedule"'
|
||||
|
||||
.doc-rules:build:docs-full:
|
||||
rules:
|
||||
- <<: *if-qa-test-tag
|
||||
when: never
|
||||
- <<: *if-protected-check
|
||||
- <<: *if-schedule
|
||||
- <<: *if-label-build_docs
|
||||
- <<: *if-label-docs_full
|
||||
- <<: *if-dev-push
|
||||
changes: *patterns-docs-full
|
||||
|
||||
.doc-rules:build:docs-full-prod:
|
||||
rules:
|
||||
- <<: *if-qa-test-tag
|
||||
when: never
|
||||
- <<: *if-protected-no_label
|
||||
|
||||
.doc-rules:build:docs-partial:
|
||||
rules:
|
||||
- <<: *if-qa-test-tag
|
||||
@@ -58,10 +70,10 @@
|
||||
check_readme_links:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
tags: ["amd64", "brew"]
|
||||
tags: ["build", "amd64", "internet"]
|
||||
allow_failure: true
|
||||
rules:
|
||||
- <<: *if-protected-check
|
||||
- <<: *if-protected
|
||||
- <<: *if-dev-push
|
||||
changes: *patterns-example-readme
|
||||
script:
|
||||
@@ -80,20 +92,13 @@ check_docs_lang_sync:
|
||||
stage: build_doc
|
||||
tags:
|
||||
- build_docs
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
script:
|
||||
- if [ -n "${BREATHE_ALT_INSTALL_URL_PY39}" ]; then
|
||||
pip uninstall -y breathe && pip install -U ${BREATHE_ALT_INSTALL_URL_PY39};
|
||||
fi
|
||||
- cd docs
|
||||
- build-docs -t $DOCTGT -bs $DOC_BUILDERS -l $DOCLANG build
|
||||
parallel:
|
||||
matrix:
|
||||
- DOCLANG: ["en", "zh_CN"]
|
||||
DOCTGT: ["esp32", "esp32s2", "esp32s3", "esp32c3", "esp32c2", "esp32c6", "esp32c61", "esp32c5", "esp32h2", "esp32h4", "esp32h21", "esp32p4"]
|
||||
DOCTGT: ["esp32", "esp32s2", "esp32s3", "esp32c3", "esp32c2", "esp32c6", "esp32h2", "esp32p4"]
|
||||
|
||||
check_docs_gh_links:
|
||||
image: $ESP_IDF_DOC_ENV_IMAGE
|
||||
@@ -110,6 +115,24 @@ build_docs_html_full:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/*.txt
|
||||
- docs/_build/*/*/html/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "html"
|
||||
|
||||
build_docs_html_full_prod:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full-prod
|
||||
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
@@ -123,6 +146,10 @@ build_docs_html_partial:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-partial
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
@@ -138,21 +165,45 @@ build_docs_html_partial:
|
||||
- DOCLANG: "zh_CN"
|
||||
DOCTGT: "esp32p4"
|
||||
|
||||
build_docs_pdf:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/latex/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "latex"
|
||||
|
||||
build_docs_pdf_prod:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full-prod
|
||||
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/latex/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "latex"
|
||||
|
||||
.deploy_docs_template:
|
||||
image: $ESP_IDF_DOC_ENV_IMAGE
|
||||
variables:
|
||||
DOCS_BUILD_DIR: "${IDF_PATH}/docs/_build/"
|
||||
PYTHONUNBUFFERED: 1
|
||||
# ensure all tags are fetched, need to know the latest/stable tag for the docs
|
||||
GIT_STRATEGY: clone
|
||||
GIT_DEPTH: 0
|
||||
stage: test_deploy
|
||||
tags:
|
||||
- brew
|
||||
- amd64
|
||||
- deploy
|
||||
- shiny
|
||||
script:
|
||||
# ensure all tags are fetched, need to know the latest/stable tag for the docs
|
||||
- git fetch --tags --prune
|
||||
- add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
|
||||
- export GIT_VER=$(git describe --always ${PIPELINE_COMMIT_SHA} --)
|
||||
- deploy-docs
|
||||
@@ -171,6 +222,8 @@ deploy_docs_preview:
|
||||
optional: true
|
||||
- job: build_docs_html_full
|
||||
optional: true
|
||||
- job: build_docs_pdf
|
||||
optional: true
|
||||
variables:
|
||||
TYPE: "preview"
|
||||
# older branches use DOCS_DEPLOY_KEY, DOCS_SERVER, DOCS_SERVER_USER, DOCS_PATH for preview server so we keep these names for 'preview'
|
||||
@@ -179,21 +232,18 @@ deploy_docs_preview:
|
||||
DOCS_DEPLOY_SERVER_USER: "$DOCS_SERVER_USER"
|
||||
DOCS_DEPLOY_PATH: "$DOCS_PATH"
|
||||
DOCS_DEPLOY_URL_BASE: "https://$DOCS_PREVIEW_SERVER_URL/docs/esp-idf"
|
||||
environment:
|
||||
name: deploy_docs_preview
|
||||
deployment_tier: staging
|
||||
url: "https://$DOCS_PREVIEW_SERVER_URL/docs/esp-idf"
|
||||
|
||||
# stage: post_deploy
|
||||
deploy_docs_production:
|
||||
# The DOCS_PROD_* variables used by this job are "Protected" so these branches must all be marked "Protected" in Gitlab settings
|
||||
extends:
|
||||
- .deploy_docs_template
|
||||
- .rules:protected:deploy
|
||||
- .doc-rules:build:docs-full-prod
|
||||
stage: post_deploy
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs: # ensure runs after push_to_github succeeded
|
||||
- build_docs_html_full
|
||||
- build_docs_html_full_prod
|
||||
- build_docs_pdf_prod
|
||||
- job: push_to_github
|
||||
artifacts: false
|
||||
variables:
|
||||
@@ -204,15 +254,11 @@ deploy_docs_production:
|
||||
DOCS_DEPLOY_PATH: "$DOCS_PROD_PATH"
|
||||
DOCS_DEPLOY_URL_BASE: "https://docs.espressif.com/projects/esp-idf"
|
||||
DEPLOY_STABLE: 1
|
||||
environment:
|
||||
name: deploy_docs_production
|
||||
deployment_tier: production
|
||||
url: "https://docs.espressif.com/projects/esp-idf"
|
||||
|
||||
check_doc_links:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .rules:protected:deploy
|
||||
- .doc-rules:build:docs-full-prod
|
||||
stage: post_deploy
|
||||
needs:
|
||||
- job: deploy_docs_production
|
||||
|
||||
@@ -2,41 +2,24 @@
|
||||
extends: .rules:test:host_test
|
||||
stage: host_test
|
||||
image: $ESP_ENV_IMAGE
|
||||
tags: [build, shiny]
|
||||
tags:
|
||||
- host_test
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
# run host_test jobs immediately, only after upload cache
|
||||
needs:
|
||||
- pipeline_variables
|
||||
- job: upload-pip-cache
|
||||
optional: true
|
||||
artifacts: false
|
||||
- job: upload-submodules-cache
|
||||
optional: true
|
||||
artifacts: false
|
||||
- job: fast_template_app
|
||||
optional: true
|
||||
artifacts: false
|
||||
artifacts:
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
- pipeline_variables
|
||||
|
||||
check_public_headers:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .rules:build:check
|
||||
test_nvs_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- IDF_TARGET=esp32 python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32-elf-
|
||||
- IDF_TARGET=esp32s2 python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32s2-elf-
|
||||
- IDF_TARGET=esp32s3 python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32s3-elf-
|
||||
- IDF_TARGET=esp32c3 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32c2 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32c6 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32c5 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32h2 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32p4 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32c61 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32h21 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32h4 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- cd components/nvs_flash/test_nvs_host
|
||||
- make test
|
||||
|
||||
test_nvs_coverage:
|
||||
extends:
|
||||
@@ -44,10 +27,11 @@ test_nvs_coverage:
|
||||
- .rules:labels:nvs_coverage
|
||||
artifacts:
|
||||
paths:
|
||||
- components/nvs_flash/host_test/nvs_host_test/coverage_report
|
||||
- components/nvs_flash/test_nvs_host/coverage_report
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- cd components/nvs_flash/host_test/nvs_host_test
|
||||
- idf.py build coverage
|
||||
- cd components/nvs_flash/test_nvs_host
|
||||
- make coverage_report
|
||||
# the 'long' host tests take approx 11 hours on our current runners. Adding some margin here for possible CPU contention
|
||||
timeout: 18 hours
|
||||
|
||||
@@ -55,38 +39,78 @@ test_partition_table_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd components/partition_table/test_gen_esp32part_host
|
||||
- pytest_for_ut ./gen_esp32part_tests.py
|
||||
- ./gen_esp32part_tests.py
|
||||
|
||||
test_ldgen_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd tools/ldgen/test
|
||||
- export PYTHONPATH=$PYTHONPATH:..
|
||||
- pytest_for_ut .
|
||||
- python -m unittest
|
||||
variables:
|
||||
LC_ALL: C.UTF-8
|
||||
|
||||
test_reproducible_build:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- ./tools/ci/test_reproducible_build.sh
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
- "**/sdkconfig"
|
||||
- "**/build*/*.bin"
|
||||
- "**/build*/*.elf"
|
||||
- "**/build*/*.map"
|
||||
- "**/build*/flasher_args.json"
|
||||
- "**/build*/*.bin"
|
||||
- "**/build*/bootloader/*.bin"
|
||||
- "**/build*/partition_table/*.bin"
|
||||
expire_in: 1 week
|
||||
|
||||
test_spiffs_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd components/spiffs/test_spiffsgen/
|
||||
- pytest_for_ut ./test_spiffsgen.py
|
||||
- ./test_spiffsgen.py
|
||||
|
||||
test_fatfsgen_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd components/fatfs/test_fatfsgen/
|
||||
- pytest_for_ut ./test_fatfsgen.py ./test_wl_fatfsgen.py ./test_fatfsparse.py
|
||||
- ./test_fatfsgen.py
|
||||
- ./test_wl_fatfsgen.py
|
||||
- ./test_fatfsparse.py
|
||||
|
||||
test_multi_heap_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd components/heap/test_multi_heap_host
|
||||
- ./test_all_configs.sh
|
||||
|
||||
test_certificate_bundle_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd components/mbedtls/esp_crt_bundle/test_gen_crt_bundle/
|
||||
- pytest_for_ut ./test_gen_crt_bundle.py
|
||||
- ./test_gen_crt_bundle.py
|
||||
|
||||
test_gdbstub_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd components/esp_gdbstub/test_gdbstub_host
|
||||
- make test
|
||||
|
||||
test_idf_py:
|
||||
extends: .host_test_template
|
||||
variables:
|
||||
LC_ALL: C.UTF-8
|
||||
script:
|
||||
- cd ${IDF_PATH}/tools/test_idf_py
|
||||
- ./test_idf_py.py
|
||||
- ./test_hints.py
|
||||
|
||||
# Test for create virtualenv. It must be invoked from Python, not from virtualenv.
|
||||
# Use docker image system python without any extra dependencies
|
||||
test_cli_installer:
|
||||
test_idf_tools:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .before_script:minimal
|
||||
@@ -95,37 +119,61 @@ test_cli_installer:
|
||||
paths:
|
||||
- tools/tools.new.json
|
||||
- tools/test_idf_tools/test_python_env_logs.txt
|
||||
expire_in: 1 week
|
||||
image:
|
||||
name: $ESP_ENV_IMAGE
|
||||
entrypoint: [""] # use system python3. no extra pip package installed
|
||||
script:
|
||||
# Tools must be downloaded for testing
|
||||
# We could use "idf_tools.py download all", but we don't want to install clang because of its huge size
|
||||
# cmake@version that is supported
|
||||
- python3 ${IDF_PATH}/tools/idf_tools.py download required qemu-riscv32 qemu-xtensa cmake cmake@3.22.1
|
||||
- python3 ${IDF_PATH}/tools/idf_tools.py download required qemu-riscv32 qemu-xtensa
|
||||
- cd ${IDF_PATH}/tools/test_idf_tools
|
||||
- python3 -m pip install jsonschema
|
||||
# Testing with system python3, so don't use any third-party packages
|
||||
- python3 ./test_idf_tools.py -v
|
||||
- python3 ./test_idf_tools_python_env.py
|
||||
# It runs at the end because it modifies dependencies
|
||||
- IDF_TEST_MAY_BREAK_DEPENDENCIES=1 python3 ./test_idf_tools.py -v TestSystemDependencies.test_commands_when_nodeps
|
||||
|
||||
test_efuse_table_on_host:
|
||||
.test_efuse_table_on_host_template:
|
||||
extends: .host_test_template
|
||||
parallel:
|
||||
matrix:
|
||||
- IDF_TARGET: [esp32, esp32s2, esp32c3, esp32s3, esp32c2, esp32c6, esp32h2, esp32p4, esp32c5, esp32c61]
|
||||
variables:
|
||||
IDF_TARGET: "esp32"
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
- components/efuse/${IDF_TARGET}/esp_efuse_table.c
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- cd ${IDF_PATH}/components/efuse/
|
||||
- ./efuse_table_gen.py -t "${IDF_TARGET}" ${IDF_PATH}/components/efuse/${IDF_TARGET}/esp_efuse_table.csv
|
||||
- git diff --exit-code -- ${IDF_TARGET}/esp_efuse_table.c || { echo 'Differences found for ${IDF_TARGET} target. Please run idf.py efuse-common-table and commit the changes.'; exit 1; }
|
||||
- cd ${IDF_PATH}/components/efuse/test_efuse_host
|
||||
- pytest_for_ut ./efuse_tests.py
|
||||
- ./efuse_tests.py
|
||||
|
||||
test_efuse_table_on_host_esp32:
|
||||
extends: .test_efuse_table_on_host_template
|
||||
|
||||
test_efuse_table_on_host_esp32s2:
|
||||
extends: .test_efuse_table_on_host_template
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
|
||||
test_efuse_table_on_host_esp32s3:
|
||||
extends: .test_efuse_table_on_host_template
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
|
||||
test_efuse_table_on_host_esp32c3:
|
||||
extends: .test_efuse_table_on_host_template
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
|
||||
test_efuse_table_on_host_esp32h2:
|
||||
extends: .test_efuse_table_on_host_template
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
|
||||
test_efuse_table_on_host_esp32c6:
|
||||
extends: .test_efuse_table_on_host_template
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
|
||||
test_logtrace_proc:
|
||||
extends: .host_test_template
|
||||
@@ -134,6 +182,7 @@ test_logtrace_proc:
|
||||
paths:
|
||||
- tools/esp_app_trace/test/logtrace/output
|
||||
- tools/esp_app_trace/test/logtrace/.coverage
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- cd ${IDF_PATH}/tools/esp_app_trace/test/logtrace
|
||||
- ./test.sh
|
||||
@@ -145,58 +194,58 @@ test_sysviewtrace_proc:
|
||||
paths:
|
||||
- tools/esp_app_trace/test/sysview/output
|
||||
- tools/esp_app_trace/test/sysview/.coverage
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- cd ${IDF_PATH}/tools/esp_app_trace/test/sysview
|
||||
- ./test.sh
|
||||
|
||||
test_tools:
|
||||
test_mkdfu:
|
||||
extends: .host_test_template
|
||||
variables:
|
||||
LC_ALL: C.UTF-8
|
||||
script:
|
||||
- cd ${IDF_PATH}/tools/test_mkdfu
|
||||
- ./test_mkdfu.py
|
||||
|
||||
test_autocomplete:
|
||||
extends:
|
||||
- .host_test_template
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
- ${IDF_PATH}/*.out
|
||||
- ${IDF_PATH}/XUNIT_*.xml
|
||||
reports:
|
||||
junit: ${IDF_PATH}/XUNIT_*.xml
|
||||
variables:
|
||||
LC_ALL: C.UTF-8
|
||||
INSTALL_EXTRA_TOOLS: "qemu-xtensa qemu-riscv32" # for test_idf_qemu.py
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- ${IDF_PATH}/tools/ci/test_autocomplete.py
|
||||
|
||||
test_detect_python:
|
||||
extends:
|
||||
- .host_test_template
|
||||
script:
|
||||
- stat=0
|
||||
- run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- cd ${IDF_PATH}/tools/ci/test_autocomplete
|
||||
- run_cmd pytest --noconftest test_autocomplete.py --junitxml=${IDF_PATH}/XUNIT_AUTOCOMP.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME} || stat=1
|
||||
- cd ${IDF_PATH}/tools/test_idf_py
|
||||
- run_cmd pytest --noconftest test_idf_py.py --junitxml=${IDF_PATH}/XUNIT_IDF_PY.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME} || stat=1
|
||||
- run_cmd pytest --noconftest test_hints.py --junitxml=${IDF_PATH}/XUNIT_HINTS.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME} || stat=1
|
||||
- run_cmd pytest --noconftest test_idf_qemu.py --junitxml=${IDF_PATH}/XUNIT_IDF_PY_QEMU.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME} || stat=1
|
||||
- cd ${IDF_PATH}/tools/test_bsasm
|
||||
- run_cmd pytest --noconftest test_bsasm.py --junitxml=${IDF_PATH}/XUNIT_BSASM.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME} || stat=1
|
||||
- cd ${IDF_PATH}/tools/test_mkdfu
|
||||
- run_cmd pytest --noconftest test_mkdfu.py --junitxml=${IDF_PATH}/XUNIT_MKDFU.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME} || stat=1
|
||||
- cd ${IDF_PATH}/tools/test_idf_size
|
||||
- run_cmd pytest --noconftest test_idf_size.py --junitxml=${IDF_PATH}/XUNIT_IDF_SIZE.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME} || stat=1
|
||||
- cd ${IDF_PATH}/tools/test_idf_diag
|
||||
- run_cmd pytest --noconftest test_idf_diag.py --junitxml=${IDF_PATH}/XUNIT_IDF_DIAG.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME} || stat=1
|
||||
- cd ${IDF_PATH}
|
||||
- shellcheck -s sh tools/detect_python.sh || stat=1
|
||||
- shellcheck -s bash tools/detect_python.sh || stat=1
|
||||
- shellcheck -s dash tools/detect_python.sh || stat=1
|
||||
- shellcheck -s sh tools/detect_python.sh
|
||||
- shellcheck -s bash tools/detect_python.sh
|
||||
- shellcheck -s dash tools/detect_python.sh
|
||||
- "bash -c '. tools/detect_python.sh && echo Our Python: ${ESP_PYTHON?Python is not set}'"
|
||||
- "dash -c '. tools/detect_python.sh && echo Our Python: ${ESP_PYTHON?Python is not set}'"
|
||||
- "zsh -c '. tools/detect_python.sh && echo Our Python: ${ESP_PYTHON?Python is not set}'"
|
||||
- "fish -c 'source tools/detect_python.fish && echo Our Python: $ESP_PYTHON'"
|
||||
- exit "$stat"
|
||||
|
||||
test_split_path_by_spaces:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd ${IDF_PATH}/tools
|
||||
- pytest_for_ut ./split_paths_by_spaces.py
|
||||
- python -m unittest split_paths_by_spaces.py
|
||||
|
||||
test_mqtt_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd ${IDF_PATH}/components/mqtt/esp-mqtt/host_test
|
||||
- idf.py build
|
||||
- LSAN_OPTIONS=verbosity=1:log_threads=1 build/host_mqtt_client_test.elf
|
||||
|
||||
test_transport_on_host:
|
||||
extends: .host_test_template
|
||||
allow_failure: true # IDFCI-2781 [v5.5, v5.4] test_transport_on_host fails on ubuntu 24.04
|
||||
script:
|
||||
- cd ${IDF_PATH}/components/tcp_transport/host_test
|
||||
- idf.py build
|
||||
@@ -231,274 +280,67 @@ test_gen_soc_caps_kconfig:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd ${IDF_PATH}/tools/gen_soc_caps_kconfig/
|
||||
- pytest_for_ut ./test/test_gen_soc_caps_kconfig.py
|
||||
- ./test/test_gen_soc_caps_kconfig.py
|
||||
|
||||
test_idf_build_apps_load_soc_caps:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd tools/ci
|
||||
- pytest_for_ut ./test_soc_headers_load_in_idf_build_apps.py
|
||||
|
||||
test_nvs_gen_check:
|
||||
extends: .host_test_template
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- components/nvs_flash/nvs_partition_tool
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
variables:
|
||||
LC_ALL: C.UTF-8
|
||||
script:
|
||||
- cd ${IDF_PATH}/components/nvs_flash/nvs_partition_tool
|
||||
- pytest_for_ut test_nvs_gen_check.py
|
||||
|
||||
test_esp_rom:
|
||||
extends: .host_test_template
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
script:
|
||||
- cd ${IDF_PATH}/components/esp_rom/
|
||||
- pytest_for_ut test_esp_rom.py
|
||||
|
||||
make_sure_soc_caps_compatible_in_idf_build_apps:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .rules:dev-push
|
||||
artifacts:
|
||||
paths:
|
||||
- new.json
|
||||
- base.json
|
||||
when: always
|
||||
when: manual
|
||||
script:
|
||||
- python tools/ci/idf_build_apps_dump_soc_caps.py new.json
|
||||
- git fetch --depth=1 origin $CI_MERGE_REQUEST_DIFF_BASE_SHA
|
||||
- git checkout -f $CI_MERGE_REQUEST_DIFF_BASE_SHA
|
||||
- git checkout $CI_COMMIT_SHA -- tools/ci/idf_build_apps_dump_soc_caps.py
|
||||
- python tools/ci/idf_build_apps_dump_soc_caps.py base.json
|
||||
- diff new.json base.json
|
||||
|
||||
build_docker:
|
||||
extends:
|
||||
- .before_script:minimal
|
||||
- .rules:build:docker
|
||||
stage: host_test
|
||||
needs: []
|
||||
image: espressif/docker-builder:1
|
||||
tags: [shiny, dind]
|
||||
variables:
|
||||
DOCKER_TMP_IMAGE_NAME: "idf_tmp_image"
|
||||
script:
|
||||
- export DOCKER_BUILD_ARGS="--build-arg IDF_CLONE_URL=${CI_REPOSITORY_URL} --build-arg IDF_CLONE_BRANCH_OR_TAG=${CI_COMMIT_REF_NAME} --build-arg IDF_CHECKOUT_REF=${CI_COMMIT_TAG:-$CI_COMMIT_SHA} --build-arg IDF_CLONE_SHALLOW=1 --build-arg IDF_GITHUB_ASSETS=${INTERNAL_GITHUB_ASSETS}"
|
||||
- docker build --tag ${DOCKER_TMP_IMAGE_NAME} ${DOCKER_BUILD_ARGS} tools/docker/
|
||||
# We can't mount $PWD/examples/get-started/blink into the container, see https://gitlab.com/gitlab-org/gitlab-ce/issues/41227.
|
||||
# The workaround mentioned there works, but leaves around directories which need to be cleaned up manually.
|
||||
# Therefore, build a copy of the example located inside the container.
|
||||
- docker run --rm --workdir /opt/esp/idf/examples/get-started/blink ${DOCKER_TMP_IMAGE_NAME} idf.py build
|
||||
|
||||
############################
|
||||
# Host test with test apps #
|
||||
############################
|
||||
test_pytest_qemu:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .before_script:build
|
||||
image: $QEMU_IMAGE
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- pytest-embedded/
|
||||
- "**/build*/*.bin"
|
||||
- pytest_embedded_log/
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
expire_in: 1 week
|
||||
allow_failure: true # IDFCI-1752
|
||||
parallel:
|
||||
matrix:
|
||||
- IDF_TARGET: "esp32"
|
||||
INSTALL_EXTRA_TOOLS: "qemu-xtensa"
|
||||
# Skip Clang + Xtensa tests due to bootloader size issue
|
||||
IDF_TOOLCHAIN: [gcc]
|
||||
- IDF_TARGET: "esp32c3"
|
||||
INSTALL_EXTRA_TOOLS: "qemu-riscv32"
|
||||
IDF_TOOLCHAIN: [gcc, clang]
|
||||
- IDF_TARGET: [esp32, esp32c3]
|
||||
script:
|
||||
- run_cmd idf-ci build run
|
||||
--build-system cmake
|
||||
- run_cmd python tools/ci/ci_build_apps.py . -vv
|
||||
--target $IDF_TARGET
|
||||
--pytest-apps
|
||||
-m qemu
|
||||
--modified-files ${MR_MODIFIED_FILES}
|
||||
- run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
|
||||
- run_cmd pytest
|
||||
--target $IDF_TARGET
|
||||
-m qemu
|
||||
--embedded-services idf,qemu
|
||||
--junitxml=XUNIT_RESULT.xml
|
||||
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
--qemu-extra-args \"-global driver=timer.$IDF_TARGET.timg,property=wdt_disable,value=true\"
|
||||
--ignore-result-files known_failure_cases/known_failure_cases.txt
|
||||
--app-info-filepattern \"list_job_*.txt\"
|
||||
|
||||
test_pytest_linux:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .before_script:build
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- pytest-embedded/
|
||||
- "**/build*/build_log.txt"
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
script:
|
||||
- run_cmd idf-ci build run
|
||||
--build-system cmake
|
||||
-p components -p examples -p tools/test_apps
|
||||
--target linux
|
||||
--only-test-related
|
||||
--modified-files ${MR_MODIFIED_FILES}
|
||||
- run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- run_cmd pytest
|
||||
--target linux
|
||||
--embedded-services idf
|
||||
--junitxml=XUNIT_RESULT.xml
|
||||
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
|
||||
test_pytest_macos:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .brew-macos-settings
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- pytest-embedded/
|
||||
- "**/build*/build_log.txt"
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
script:
|
||||
- run_cmd idf-ci build run
|
||||
-p components -p examples -p tools/test_apps
|
||||
--build-system cmake
|
||||
--target linux
|
||||
--only-test-related
|
||||
-m macos
|
||||
--modified-files ${MR_MODIFIED_FILES}
|
||||
- run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- run_cmd pytest
|
||||
--target linux
|
||||
-m macos
|
||||
--junitxml=XUNIT_RESULT.xml
|
||||
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
|
||||
######################
|
||||
# Build System Tests #
|
||||
######################
|
||||
.test_build_system_template:
|
||||
stage: host_test
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build:check
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
artifacts:
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- test_build_system
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- pytest_embedded_log/
|
||||
- "**/build*/build_log.txt"
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
|
||||
- cd ${IDF_PATH}/tools/test_build_system
|
||||
- run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- pytest
|
||||
--cleanup-idf-copy
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--work-dir ${CI_PROJECT_DIR}/test_build_system
|
||||
--junitxml ${CI_PROJECT_DIR}/XUNIT_RESULT.xml
|
||||
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
|
||||
.test_build_system_minimal_cmake_template:
|
||||
extends: .test_build_system_template
|
||||
variables:
|
||||
INSTALL_EXTRA_TOOLS: cmake@3.22.1
|
||||
script:
|
||||
- MINIMAL_SUPPORTED_CMAKE_VERSION=$(echo "${INSTALL_EXTRA_TOOLS}" | sed -n 's/.*cmake@\([0-9.]*\).*/\1/p')
|
||||
- export PATH=$(echo "$PATH" | sed -E "s|/tools/cmake/[0-9.]+|/tools/cmake/${MINIMAL_SUPPORTED_CMAKE_VERSION}|")
|
||||
- ACTUAL_CMAKE_VERSION=$(cmake --version | head -n1 | awk '{print $3}')
|
||||
- |
|
||||
if [ "${ACTUAL_CMAKE_VERSION}" != "${MINIMAL_SUPPORTED_CMAKE_VERSION}" ]; then
|
||||
echo "ERROR: Wrong minimal CMake version! Detected: ${ACTUAL_CMAKE_VERSION}, but should be: ${MINIMAL_SUPPORTED_CMAKE_VERSION}"
|
||||
exit 1
|
||||
fi
|
||||
- ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
|
||||
- cd ${IDF_PATH}/tools/test_build_system
|
||||
- run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- pytest
|
||||
-k cmake
|
||||
--cleanup-idf-copy
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--work-dir ${CI_PROJECT_DIR}/test_build_system
|
||||
--junitxml ${CI_PROJECT_DIR}/XUNIT_RESULT.xml
|
||||
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
|
||||
pytest_build_system:
|
||||
extends: .test_build_system_template
|
||||
parallel: 3
|
||||
|
||||
pytest_buildv2_system:
|
||||
extends:
|
||||
- .test_build_system_template
|
||||
- .rules:labels:buildv2
|
||||
parallel: 3
|
||||
script:
|
||||
- ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
|
||||
- cd ${IDF_PATH}/tools/test_build_system
|
||||
- run_cmd idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- pytest
|
||||
--buildv2
|
||||
--cleanup-idf-copy
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--work-dir ${CI_PROJECT_DIR}/test_build_system
|
||||
--junitxml ${CI_PROJECT_DIR}/XUNIT_RESULT.xml
|
||||
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
--
|
||||
test_non_default_target.py
|
||||
test_component_manager.py
|
||||
test_build.py
|
||||
test_bootloader.py
|
||||
test_git.py
|
||||
test_kconfig.py
|
||||
test_partition.py
|
||||
test_reproducible_build.py
|
||||
test_sdkconfig.py
|
||||
test_versions.py
|
||||
test_common.py
|
||||
test_components.py
|
||||
test_cmake.py
|
||||
test_idf_extension.py
|
||||
test_rebuild.py
|
||||
|
||||
pytest_build_system_macos:
|
||||
extends:
|
||||
- .test_build_system_template
|
||||
- .brew-macos-settings
|
||||
- .rules:build:macos
|
||||
parallel: 3
|
||||
|
||||
pytest_build_system_minimal_cmake:
|
||||
extends: .test_build_system_minimal_cmake_template
|
||||
|
||||
pytest_build_system_macos_minimal_cmake:
|
||||
extends:
|
||||
- .test_build_system_minimal_cmake_template
|
||||
- .brew-macos-settings
|
||||
- .rules:build:macos
|
||||
variables:
|
||||
INSTALL_EXTRA_TOOLS: ninja cmake@3.22.1
|
||||
- run_cmd python tools/ci/ci_build_apps.py components examples tools/test_apps -vv
|
||||
--target linux
|
||||
--pytest-apps
|
||||
-m host_test
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
|
||||
- run_cmd pytest
|
||||
--target linux
|
||||
-m host_test
|
||||
--junitxml=XUNIT_RESULT.xml
|
||||
--ignore-result-files known_failure_cases/known_failure_cases.txt
|
||||
--app-info-filepattern \"list_job_*.txt\"
|
||||
|
||||
@@ -34,7 +34,8 @@ gen_integration_pipeline:
|
||||
image: ${CI_INTEGRATION_ASSIGN_ENV}
|
||||
stage: assign_test
|
||||
cache: []
|
||||
tags: [fast_run, shiny]
|
||||
tags:
|
||||
- assign_test
|
||||
variables:
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
GIT_LFS_SKIP_SMUDGE: 1
|
||||
@@ -46,7 +47,6 @@ gen_integration_pipeline:
|
||||
paths:
|
||||
- idf-integration-ci/child_pipeline/
|
||||
expire_in: 2 weeks
|
||||
when: always
|
||||
script:
|
||||
- add_gitlab_ssh_keys
|
||||
- retry_failed git clone ${CI_GEN_INTEGRATION_PIPELINE_REPO} idf-integration-ci
|
||||
@@ -60,9 +60,6 @@ child_integration_test_pipeline:
|
||||
stage: assign_test
|
||||
needs:
|
||||
- gen_integration_pipeline
|
||||
variables:
|
||||
IDF_S3_SERVER: $IDF_S3_NEW_SERVER
|
||||
IDF_S3_ACCESS_KEY: $IDF_S3_NEW_ACCESS_KEY
|
||||
trigger:
|
||||
include:
|
||||
- artifact: idf-integration-ci/child_pipeline/pipeline.yml
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
.post_deploy_template:
|
||||
stage: post_deploy
|
||||
image: $ESP_ENV_IMAGE
|
||||
|
||||
generate_failed_jobs_report:
|
||||
extends:
|
||||
- .post_deploy_template
|
||||
tags: [build, shiny]
|
||||
when: always
|
||||
dependencies: # Do not download artifacts from the previous stages
|
||||
needs:
|
||||
- pipeline_variables
|
||||
artifacts:
|
||||
expire_in: 2 week
|
||||
when: always
|
||||
paths:
|
||||
- job_report.html
|
||||
script:
|
||||
- python tools/ci/dynamic_pipelines/scripts/generate_report.py --report-type job
|
||||
|
||||
sync_support_status:
|
||||
extends:
|
||||
- .post_deploy_template
|
||||
- .rules:master:push
|
||||
tags: [ brew, github_sync ]
|
||||
needs:
|
||||
- push_to_github
|
||||
cache: []
|
||||
before_script: []
|
||||
script:
|
||||
- curl --fail --request POST --form token="$IDF_STATUS_TRIG_TOKEN" --form ref="$IDF_STATUS_BRANCH" --form "variables[UPLOAD_TO_S3]=true" "$IDF_STATUS_TRIG_URL"
|
||||
environment:
|
||||
name: sync_support_status_production
|
||||
deployment_tier: production
|
||||
@@ -1,22 +1,55 @@
|
||||
.pre_check_template:
|
||||
stage: pre_check
|
||||
image: $ESP_ENV_IMAGE
|
||||
tags: [build, shiny]
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
tags:
|
||||
- host_test
|
||||
dependencies: []
|
||||
|
||||
check_pre_commit:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .before_script:minimal
|
||||
image: $PRE_COMMIT_IMAGE
|
||||
needs:
|
||||
- pipeline_variables
|
||||
script:
|
||||
- fetch_submodules
|
||||
- pre-commit run --files $MODIFIED_FILES
|
||||
|
||||
check_MR_style_dangerjs:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
image: node:18.15.0-alpine3.16
|
||||
variables:
|
||||
DANGER_GITLAB_API_TOKEN: ${ESPCI_TOKEN}
|
||||
DANGER_GITLAB_HOST: ${GITLAB_HTTP_SERVER}
|
||||
DANGER_GITLAB_API_BASE_URL: ${GITLAB_HTTP_SERVER}/api/v4
|
||||
DANGER_JIRA_USER: ${DANGER_JIRA_USER}
|
||||
DANGER_JIRA_PASSWORD: ${DANGER_JIRA_PASSWORD}
|
||||
cache:
|
||||
# pull only for most of the use cases since it's cache dir.
|
||||
# Only set "push" policy for "upload_cache" stage jobs
|
||||
key:
|
||||
files:
|
||||
- .gitlab/dangerjs/package-lock.json
|
||||
paths:
|
||||
- .gitlab/dangerjs/node_modules/
|
||||
policy: pull
|
||||
before_script:
|
||||
- cd .gitlab/dangerjs
|
||||
- npm install --no-progress --no-update-notifier # Install danger dependencies
|
||||
script:
|
||||
- npx danger ci --failOnErrors -v
|
||||
rules:
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
|
||||
check_version:
|
||||
# Don't run this for feature/bugfix branches, so that it is possible to modify
|
||||
# esp_idf_version.h in a branch before tagging the next version.
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:protected:check
|
||||
tags: [ brew, github_sync ]
|
||||
variables:
|
||||
# need a full clone to get the latest tag
|
||||
# the --shallow-since=$(git log -1 --format=%as $LATEST_GIT_TAG) option is not accurate
|
||||
GIT_STRATEGY: fetch
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
GIT_DEPTH: 0
|
||||
- .rules:protected
|
||||
- .before_script:fetch:git_describe
|
||||
script:
|
||||
- export IDF_PATH=$PWD
|
||||
- tools/ci/check_idf_version.sh
|
||||
@@ -24,8 +57,7 @@ check_version:
|
||||
check_api_usage:
|
||||
extends: .pre_check_template
|
||||
script:
|
||||
- python -m pip install ast-grep-cli # use ast-grep to describe customized lint rules
|
||||
- ast-grep scan
|
||||
- tools/ci/check_examples_rom_header.sh
|
||||
- tools/ci/check_api_violation.sh
|
||||
- tools/ci/check_examples_extra_component_dirs.sh
|
||||
|
||||
@@ -43,9 +75,6 @@ check_blobs:
|
||||
- IDF_TARGET=esp32c2 $IDF_PATH/components/esp_wifi/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32c3 $IDF_PATH/components/esp_wifi/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32c6 $IDF_PATH/components/esp_wifi/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32c5 $IDF_PATH/components/esp_wifi/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32c61 $IDF_PATH/components/esp_wifi/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32_host $IDF_PATH/components/esp_wifi/test_md5/test_md5.sh
|
||||
# Check if Coexistence library header files match between IDF and the version used when compiling the libraries
|
||||
- IDF_TARGET=esp32 $IDF_PATH/components/esp_coex/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32s2 $IDF_PATH/components/esp_coex/test_md5/test_md5.sh
|
||||
@@ -54,11 +83,23 @@ check_blobs:
|
||||
- IDF_TARGET=esp32c3 $IDF_PATH/components/esp_coex/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32c6 $IDF_PATH/components/esp_coex/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32h2 $IDF_PATH/components/esp_coex/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32c5 $IDF_PATH/components/esp_coex/test_md5/test_md5.sh
|
||||
- IDF_TARGET=esp32c61 $IDF_PATH/components/esp_coex/test_md5/test_md5.sh
|
||||
# Check if Wi-Fi, PHY, BT blobs contain references to specific symbols
|
||||
- bash $IDF_PATH/tools/ci/check_blobs.sh
|
||||
|
||||
check_public_headers:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:build:check
|
||||
script:
|
||||
- IDF_TARGET=esp32 python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32-elf-
|
||||
- IDF_TARGET=esp32s2 python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32s2-elf-
|
||||
- IDF_TARGET=esp32s3 python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32s3-elf-
|
||||
- IDF_TARGET=esp32c3 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32c2 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32c6 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32h2 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32p4 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
|
||||
check_chip_support_components:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
@@ -71,7 +112,7 @@ check_chip_support_components:
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- python tools/ci/check_soc_headers_leak.py
|
||||
- find ${IDF_PATH}/components/soc/**/include/soc/ ${IDF_PATH}/components/soc/**/register/soc/ -name "*_struct.h" -print0 | xargs -0 -n1 ./tools/ci/check_soc_struct_headers.py
|
||||
- find ${IDF_PATH}/components/soc/*/include/soc/ -name "*_struct.h" -print0 | xargs -0 -n1 ./tools/ci/check_soc_struct_headers.py
|
||||
- tools/ci/check_esp_memory_utils_headers.sh
|
||||
|
||||
check_esp_err_to_name:
|
||||
@@ -102,113 +143,51 @@ check_version_tag:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:tag:release
|
||||
tags: [ brew, github_sync ]
|
||||
variables:
|
||||
# need a full clone to get the latest tag
|
||||
# the --shallow-since=$(git log -1 --format=%as $LATEST_GIT_TAG) option is not accurate
|
||||
GIT_STRATEGY: fetch
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
GIT_DEPTH: 0
|
||||
- .before_script:fetch:git_describe
|
||||
script:
|
||||
- (git cat-file -t $CI_COMMIT_REF_NAME | grep tag) || (echo "ESP-IDF versions must be annotated tags." && exit 1)
|
||||
|
||||
check_artifacts_expire_time:
|
||||
extends: .pre_check_template
|
||||
script:
|
||||
# check if we have set expire time for all artifacts
|
||||
- python tools/ci/check_artifacts_expire_time.py
|
||||
|
||||
check_test_scripts_build_test_rules:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .before_script:build
|
||||
script:
|
||||
# requires basic pytest dependencies
|
||||
# required pytest related packages
|
||||
- run_cmd bash install.sh --enable-pytest
|
||||
- python tools/ci/check_build_test_rules.py check-test-scripts examples/ tools/test_apps components
|
||||
|
||||
check_configure_ci_environment_parsing:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .before_script:build
|
||||
- .rules:build
|
||||
script:
|
||||
- cd tools/ci
|
||||
- python -m unittest ci_build_apps.py
|
||||
|
||||
pipeline_variables:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .before_script:fetch:git_diff
|
||||
tags: [fast_run, shiny]
|
||||
tags:
|
||||
- build
|
||||
script:
|
||||
# MODIFIED_FILES is a list of files that changed, could be used everywhere
|
||||
- MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | xargs)
|
||||
- echo "MODIFIED_FILES=$MODIFIED_FILES" >> pipeline.env
|
||||
# MR_MODIFIED_FILES and MR_MODIFIED_COMPONENTS are semicolon separated lists that is used in MR only
|
||||
# for non MR pipeline, these are empty lists
|
||||
- echo "MODIFIED_COMPONENTS=$(run_cmd python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | xargs)" >> pipeline.env
|
||||
- |
|
||||
if [ -z "$CI_MERGE_REQUEST_IID" ]; then
|
||||
echo "MR_MODIFIED_FILES=\"\"" >> pipeline.env
|
||||
echo "MR_MODIFIED_COMPONENTS=\"\"" >> pipeline.env
|
||||
else
|
||||
MR_MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | tr '\n' ';')
|
||||
echo "MR_MODIFIED_FILES=\"$MR_MODIFIED_FILES\"" >> pipeline.env
|
||||
|
||||
MR_MODIFIED_COMPONENTS=$(run_cmd python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | tr '\n' ';')
|
||||
echo "MR_MODIFIED_COMPONENTS=\"$MR_MODIFIED_COMPONENTS\"" >> pipeline.env
|
||||
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)BUILD_AND_TEST_ALL_APPS(,|$)"; then
|
||||
echo "BUILD_AND_TEST_ALL_APPS=1" >> pipeline.env
|
||||
fi
|
||||
- |
|
||||
if [ -n "$PIPELINE_COMMIT_SHA" ]; then
|
||||
echo "PIPELINE_COMMIT_SHA=$PIPELINE_COMMIT_SHA" >> pipeline.env
|
||||
fi
|
||||
- echo "OOCD_DISTRO_URL_ARMHF=$OOCD_DISTRO_URL_ARMHF" >> pipeline.env
|
||||
- echo "OOCD_DISTRO_URL_ARM64=$OOCD_DISTRO_URL_ARM64" >> pipeline.env
|
||||
- run_cmd idf-ci gitlab pipeline-variables >> pipeline.env
|
||||
- cat pipeline.env
|
||||
- run_cmd idf-ci gitlab upload-artifacts --type env
|
||||
- python tools/ci/artifacts_handler.py upload --type modified_files_and_components_report
|
||||
artifacts:
|
||||
reports:
|
||||
dotenv: pipeline.env
|
||||
paths:
|
||||
- pipeline.env
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
|
||||
baseline_manifest_sha:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:dev-push
|
||||
tags: [fast_run, shiny]
|
||||
script:
|
||||
- |
|
||||
# merged results pipelines, by default
|
||||
# diff between target-branch-head and merged-result-head
|
||||
if [ -n "$CI_MERGE_REQUEST_TARGET_BRANCH_SHA" ]; then
|
||||
git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_SHA --depth=1
|
||||
git checkout FETCH_HEAD
|
||||
idf-build-apps dump-manifest-sha \
|
||||
--manifest-files $(find . -name ".build-test-rules.yml" | xargs) \
|
||||
--output .manifest_sha
|
||||
# merge request pipelines, when the mr got conflicts
|
||||
# diff between diff-base-sha and merge-request-head
|
||||
elif [ -n "$CI_MERGE_REQUEST_DIFF_BASE_SHA" ]; then
|
||||
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1
|
||||
git checkout FETCH_HEAD
|
||||
idf-build-apps dump-manifest-sha \
|
||||
--manifest-files $(find . -name ".build-test-rules.yml" | xargs) \
|
||||
--output .manifest_sha
|
||||
# other pipelines, like the protected branches pipelines
|
||||
# not triggered in this job
|
||||
fi
|
||||
artifacts:
|
||||
paths:
|
||||
- .manifest_sha
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
|
||||
gcc_static_analyzer:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:build
|
||||
variables:
|
||||
CI_CCACHE_DISABLE: 1
|
||||
ANALYZING_APP: "examples/get-started/hello_world"
|
||||
script:
|
||||
- echo "CONFIG_COMPILER_STATIC_ANALYZER=y" >> ${ANALYZING_APP}/sdkconfig.defaults
|
||||
- idf-build-apps build -p ${ANALYZING_APP}
|
||||
|
||||
retry_failed_jobs:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:dev-push
|
||||
tags: [shiny, fast_run]
|
||||
allow_failure: true
|
||||
script:
|
||||
- echo "Retrieving and retrying all failed jobs for the pipeline..."
|
||||
- python tools/ci/python_packages/gitlab_api.py retry_failed_jobs $CI_MERGE_REQUEST_PROJECT_ID --pipeline_id $CI_PIPELINE_ID
|
||||
when: manual
|
||||
expire_in: 4 days
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
.check_pre_commit_template:
|
||||
extends:
|
||||
- .before_script:minimal
|
||||
stage: pre_check
|
||||
image: "${CI_DOCKER_REGISTRY}/esp-idf-pre-commit:2"
|
||||
tags: [pre-commit]
|
||||
variables:
|
||||
# Both shiny and brew runners can pick this job
|
||||
GIT_STRATEGY: fetch
|
||||
GIT_DEPTH: 1
|
||||
SUBMODULES_TO_FETCH: "all"
|
||||
needs:
|
||||
- pipeline_variables
|
||||
script:
|
||||
- fetch_submodules
|
||||
- pre-commit run --files $MODIFIED_FILES
|
||||
- pre-commit run --hook-stage post-commit validate-sbom-manifest
|
||||
|
||||
check_pre_commit:
|
||||
extends:
|
||||
- .check_pre_commit_template
|
||||
rules:
|
||||
- if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/) && $CI_PIPELINE_SOURCE == "push"'
|
||||
when: never
|
||||
- when: on_success
|
||||
cache:
|
||||
- key: submodule-cache-${LATEST_GIT_TAG}
|
||||
paths:
|
||||
- .cache/submodule_archives
|
||||
policy: pull
|
||||
|
||||
check_powershell:
|
||||
extends:
|
||||
- .before_script:minimal
|
||||
stage: pre_check
|
||||
image: docker:latest
|
||||
services:
|
||||
- docker:dind
|
||||
tags:
|
||||
- dind
|
||||
- amd64
|
||||
- brew # faster "apk add"
|
||||
needs:
|
||||
- pipeline_variables
|
||||
variables:
|
||||
# brew runners always use fetch
|
||||
GIT_STRATEGY: fetch
|
||||
GIT_DEPTH: 1
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
rules:
|
||||
- changes:
|
||||
- "*.ps1"
|
||||
- ".gitlab/ci/pre_commit.yml"
|
||||
script:
|
||||
- apk add python3
|
||||
- apk add py3-pip
|
||||
- pip install pre-commit --break-system-packages
|
||||
- pre-commit run --hook-stage manual check-powershell-scripts --files $MODIFIED_FILES
|
||||
2375
.gitlab/ci/rules.yml
2375
.gitlab/ci/rules.yml
File diff suppressed because it is too large
Load Diff
@@ -6,8 +6,8 @@ clang_tidy_check:
|
||||
artifacts:
|
||||
paths:
|
||||
- clang_tidy_reports/
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
expire_in: 1 day
|
||||
variables:
|
||||
IDF_TOOLCHAIN: clang
|
||||
script:
|
||||
@@ -16,84 +16,102 @@ clang_tidy_check:
|
||||
--limit-file tools/ci/static-analysis-rules.yml
|
||||
--xtensa-include-dir
|
||||
|
||||
check_pylint:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:patterns:python-files
|
||||
needs:
|
||||
- pipeline_variables
|
||||
artifacts:
|
||||
when: always
|
||||
reports:
|
||||
codequality: pylint.json
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- |
|
||||
if [ -n "$CI_MERGE_REQUEST_IID" ]; then
|
||||
export files=$(echo "$GIT_DIFF_OUTPUT" | grep ".py$" | xargs);
|
||||
else
|
||||
export files=$(git ls-files "*.py" | xargs);
|
||||
fi
|
||||
- if [ -z "$files" ]; then echo "No python files found"; exit 0; fi
|
||||
- run_cmd pylint --exit-zero --load-plugins=pylint_gitlab --output-format=gitlab-codeclimate:pylint.json $files
|
||||
|
||||
# build stage
|
||||
# Sonarqube related jobs put here for this reason:
|
||||
# Here we have two jobs. code_quality_check and code_quality_report.
|
||||
#
|
||||
## build stage
|
||||
## Sonarqube related jobs put here for this reason:
|
||||
## Here we have two jobs. code_quality_check and code_quality_report.
|
||||
##
|
||||
## code_quality_check will analyze the code changes between your MR and
|
||||
## code repo stored in sonarqube server. The analysis result is only shown in
|
||||
## the comments under this MR and won't be transferred to the server.
|
||||
##
|
||||
## code_quality_report will analyze and transfer both of the newly added code
|
||||
## and the analysis result to the server.
|
||||
##
|
||||
## Put in the front to ensure that the newly merged code can be stored in
|
||||
## sonarqube server ASAP, in order to avoid reporting unrelated code issues
|
||||
#.sonar_scan_template:
|
||||
# stage: build
|
||||
# extends: .pre_check_template
|
||||
# # full clone since this image does not support fetch --shallow-since-cutoff
|
||||
# # shiny runners are used for full clone
|
||||
# tags: [build, shiny]
|
||||
# image: $SONARQUBE_SCANNER_IMAGE
|
||||
# before_script:
|
||||
# - source tools/ci/utils.sh
|
||||
# - export PYTHONPATH="$CI_PROJECT_DIR/tools:$CI_PROJECT_DIR/tools/ci/python_packages:$PYTHONPATH"
|
||||
# - fetch_submodules
|
||||
# # Exclude the submodules, all paths ends with /**
|
||||
# - submodules=$(get_all_submodules)
|
||||
# # get all exclude paths specified in tools/ci/sonar_exclude_list.txt | ignore lines start with # | xargs | replace all <space> to <comma>
|
||||
# - custom_excludes=$(cat $CI_PROJECT_DIR/tools/ci/sonar_exclude_list.txt | grep -v '^#' | xargs | sed -e 's/ /,/g')
|
||||
# # Exclude the report dir as well
|
||||
# - export EXCLUSIONS="$custom_excludes,$submodules"
|
||||
# - export SONAR_SCANNER_OPTS="-Xmx2048m"
|
||||
# variables:
|
||||
# GIT_DEPTH: 0
|
||||
# REPORT_PATTERN: clang_tidy_reports/**/*.txt
|
||||
# artifacts:
|
||||
# paths:
|
||||
# - $REPORT_PATTERN
|
||||
# expire_in: 1 week
|
||||
# when: always
|
||||
# dependencies: # Here is not a hard dependency relationship, could be skipped when only python files changed. so we do not use "needs" here.
|
||||
# - clang_tidy_check
|
||||
# code_quality_check will analyze the code changes between your MR and
|
||||
# code repo stored in sonarqube server. The analysis result is only shown in
|
||||
# the comments under this MR and won't be transferred to the server.
|
||||
#
|
||||
#code_quality_check:
|
||||
# extends:
|
||||
# - .sonar_scan_template
|
||||
# - .rules:patterns:static-code-analysis-preview
|
||||
# allow_failure: true # it's using exit code to indicate the code analysis result,
|
||||
# # we don't want to block ci when critical issues founded
|
||||
# script:
|
||||
# - export CI_MERGE_REQUEST_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits --src-branch ${CI_COMMIT_REF_NAME} | tr '\n' ',')
|
||||
# # test if this branch have merge request, if not, exit 0
|
||||
# - test -n "$CI_MERGE_REQUEST_IID" || exit 0
|
||||
# - test -n "$CI_MERGE_REQUEST_COMMITS" || exit 0
|
||||
# - sonar-scanner
|
||||
# -Dsonar.analysis.mode=preview
|
||||
# -Dsonar.branch.name=$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
|
||||
# -Dsonar.cxx.clangtidy.reportPath=$REPORT_PATTERN
|
||||
# -Dsonar.exclusions=$EXCLUSIONS
|
||||
# -Dsonar.gitlab.ci_merge_request_iid=$CI_MERGE_REQUEST_IID
|
||||
# -Dsonar.gitlab.commit_sha=$CI_MERGE_REQUEST_COMMITS
|
||||
# -Dsonar.gitlab.merge_request_discussion=true
|
||||
# -Dsonar.gitlab.ref_name=$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
|
||||
# -Dsonar.host.url=$SONAR_HOST_URL
|
||||
# -Dsonar.login=$SONAR_LOGIN
|
||||
# code_quality_report will analyze and transfer both of the newly added code
|
||||
# and the analysis result to the server.
|
||||
#
|
||||
#code_quality_report:
|
||||
# extends:
|
||||
# - .sonar_scan_template
|
||||
# - .rules:protected:check
|
||||
# allow_failure: true # it's using exit code to indicate the code analysis result,
|
||||
# # we don't want to block ci when critical issues founded
|
||||
# script:
|
||||
# - sonar-scanner
|
||||
# -Dsonar.branch.name=$CI_COMMIT_REF_NAME
|
||||
# -Dsonar.cxx.clangtidy.reportPath=$REPORT_PATTERN
|
||||
# -Dsonar.exclusions=$EXCLUSIONS
|
||||
# -Dsonar.gitlab.commit_sha=$PIPELINE_COMMIT_SHA
|
||||
# -Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
|
||||
# -Dsonar.host.url=$SONAR_HOST_URL
|
||||
# -Dsonar.login=$SONAR_LOGIN
|
||||
# Put in the front to ensure that the newly merged code can be stored in
|
||||
# sonarqube server ASAP, in order to avoid reporting unrelated code issues
|
||||
.sonar_scan_template:
|
||||
stage: build
|
||||
extends: .pre_check_template
|
||||
image:
|
||||
name: $SONARQUBE_SCANNER_IMAGE
|
||||
before_script:
|
||||
- source tools/ci/utils.sh
|
||||
- export PYTHONPATH="$CI_PROJECT_DIR/tools:$CI_PROJECT_DIR/tools/ci/python_packages:$PYTHONPATH"
|
||||
- fetch_submodules
|
||||
# Exclude the submodules, all paths ends with /**
|
||||
- submodules=$(get_all_submodules)
|
||||
# get all exclude paths specified in tools/ci/sonar_exclude_list.txt | ignore lines start with # | xargs | replace all <space> to <comma>
|
||||
- custom_excludes=$(cat $CI_PROJECT_DIR/tools/ci/sonar_exclude_list.txt | grep -v '^#' | xargs | sed -e 's/ /,/g')
|
||||
# Exclude the report dir as well
|
||||
- export EXCLUSIONS="$custom_excludes,$submodules"
|
||||
- export SONAR_SCANNER_OPTS="-Xmx2048m"
|
||||
variables:
|
||||
GIT_DEPTH: 0
|
||||
REPORT_PATTERN: clang_tidy_reports/*.txt
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- $REPORT_PATTERN
|
||||
expire_in: 1 week
|
||||
dependencies: # Here is not a hard dependency relationship, could be skipped when only python files changed. so we do not use "needs" here.
|
||||
- clang_tidy_check
|
||||
|
||||
code_quality_check:
|
||||
extends:
|
||||
- .sonar_scan_template
|
||||
- .rules:patterns:static-code-analysis-preview
|
||||
allow_failure: true # since now it's using exit code to indicate the code analysis result,
|
||||
# we don't want to block ci when critical issues founded
|
||||
script:
|
||||
- export CI_MERGE_REQUEST_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits --src-branch ${CI_COMMIT_REF_NAME} | tr '\n' ',')
|
||||
# test if this branch have merge request, if not, exit 0
|
||||
- test -n "$CI_MERGE_REQUEST_IID" || exit 0
|
||||
- test -n "$CI_MERGE_REQUEST_COMMITS" || exit 0
|
||||
- sonar-scanner
|
||||
-Dsonar.analysis.mode=preview
|
||||
-Dsonar.branch.name=$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
|
||||
-Dsonar.cxx.clangtidy.reportPath=$REPORT_PATTERN
|
||||
-Dsonar.exclusions=$EXCLUSIONS
|
||||
-Dsonar.gitlab.ci_merge_request_iid=$CI_MERGE_REQUEST_IID
|
||||
-Dsonar.gitlab.commit_sha=$CI_MERGE_REQUEST_COMMITS
|
||||
-Dsonar.gitlab.merge_request_discussion=true
|
||||
-Dsonar.gitlab.ref_name=$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
|
||||
-Dsonar.host.url=$SONAR_HOST_URL
|
||||
-Dsonar.login=$SONAR_LOGIN
|
||||
|
||||
code_quality_report:
|
||||
extends:
|
||||
- .sonar_scan_template
|
||||
- .rules:protected
|
||||
allow_failure: true # since now it's using exit code to indicate the code analysis result,
|
||||
# we don't want to block ci when critical issues founded
|
||||
script:
|
||||
- sonar-scanner
|
||||
-Dsonar.branch.name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.cxx.clangtidy.reportPath=$REPORT_PATTERN
|
||||
-Dsonar.exclusions=$EXCLUSIONS
|
||||
-Dsonar.gitlab.commit_sha=$PIPELINE_COMMIT_SHA
|
||||
-Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.host.url=$SONAR_HOST_URL
|
||||
-Dsonar.login=$SONAR_LOGIN
|
||||
|
||||
1519
.gitlab/ci/target-test.yml
Normal file
1519
.gitlab/ci/target-test.yml
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,182 +0,0 @@
|
||||
# Host tests
|
||||
.host_test_win_template:
|
||||
extends: .rules:test:host_test
|
||||
stage: host_test
|
||||
image: $ESP_ENV_IMAGE
|
||||
tags: [windows-build, brew]
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
# run host_test jobs immediately, only after upload cache
|
||||
needs:
|
||||
- job: upload-pip-cache
|
||||
optional: true
|
||||
artifacts: false
|
||||
- job: upload-submodules-cache
|
||||
optional: true
|
||||
artifacts: false
|
||||
variables:
|
||||
GIT_STRATEGY: fetch # use brew local mirror first
|
||||
before_script:
|
||||
- if ($env:IDF_DONT_USE_MIRRORS) {
|
||||
$env:IDF_MIRROR_PREFIX_MAP = ""
|
||||
}
|
||||
after_script: []
|
||||
|
||||
test_cli_installer_win:
|
||||
rules:
|
||||
- when: never
|
||||
extends:
|
||||
- .host_test_win_template
|
||||
- .rules:labels:windows_pytest_build_system
|
||||
allow_failure: true
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
- tools/tools.new.json
|
||||
- tools/test_idf_tools/test_python_env_logs.txt
|
||||
expire_in: 1 week
|
||||
variables:
|
||||
IDF_PATH: "$CI_PROJECT_DIR"
|
||||
timeout: 3h
|
||||
script:
|
||||
# Tools must be downloaded for testing
|
||||
# cmake@version that is supported
|
||||
- python ${IDF_PATH}\tools\idf_tools.py download required qemu-riscv32 qemu-xtensa cmake cmake@3.22.1
|
||||
- cd ${IDF_PATH}\tools\test_idf_tools
|
||||
- python -m pip install jsonschema
|
||||
- python .\test_idf_tools.py
|
||||
- python .\test_idf_tools_python_env.py
|
||||
|
||||
test_tools_win:
|
||||
extends:
|
||||
- .host_test_win_template
|
||||
- .rules:labels:windows_pytest_build_system
|
||||
parallel: 4
|
||||
artifacts:
|
||||
paths:
|
||||
- ${IDF_PATH}/*.out
|
||||
- ${IDF_PATH}/XUNIT_*.xml
|
||||
reports:
|
||||
junit: ${IDF_PATH}/XUNIT_*.xml
|
||||
expire_in: 1 week
|
||||
when: always
|
||||
variables:
|
||||
LC_ALL: C.UTF-8
|
||||
PYTHONPATH: "$PYTHONPATH;$IDF_PATH\\tools;$IDF_PATH\\tools\\esp_app_trace;$IDF_PATH\\components\\partition_table;$IDF_PATH\\tools\\ci\\python_packages"
|
||||
script:
|
||||
- python -m pip install jsonschema
|
||||
- .\install.ps1 --enable-ci
|
||||
- .\export.ps1
|
||||
- python "${SUBMODULE_FETCH_TOOL}" -s "all"
|
||||
- cd ${IDF_PATH}/tools/test_idf_py
|
||||
- idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- pytest --parallel-count ${CI_NODE_TOTAL} --parallel-index ${CI_NODE_INDEX} --junitxml=${IDF_PATH}/XUNIT_RESULT.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
|
||||
# Build tests
|
||||
.test_build_system_template_win:
|
||||
stage: host_test
|
||||
variables:
|
||||
# Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
|
||||
IDF_CCACHE_ENABLE: "1"
|
||||
PYTHONPATH: "$PYTHONPATH;$IDF_PATH\\tools;$IDF_PATH\\tools\\esp_app_trace;$IDF_PATH\\components\\partition_table;$IDF_PATH\\tools\\ci\\python_packages"
|
||||
before_script: []
|
||||
after_script: []
|
||||
timeout: 4 hours
|
||||
script:
|
||||
- .\install.ps1 --enable-ci
|
||||
- . .\export.ps1
|
||||
- python "${SUBMODULE_FETCH_TOOL}" -s "all"
|
||||
- cd ${IDF_PATH}\tools\test_build_system
|
||||
- idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- pytest --parallel-count ${CI_NODE_TOTAL} --parallel-index ${CI_NODE_INDEX} --junitxml=${CI_PROJECT_DIR}\XUNIT_RESULT.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
|
||||
pytest_build_system_win:
|
||||
extends:
|
||||
- .test_build_system_template_win
|
||||
- .rules:labels:windows_pytest_build_system
|
||||
parallel: 6
|
||||
needs: []
|
||||
tags: [windows-build, brew]
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- test_build_system
|
||||
expire_in: 2 days
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
when: always
|
||||
|
||||
pytest_build_system_win_minimal_cmake:
|
||||
extends:
|
||||
- .test_build_system_template_win
|
||||
- .rules:labels:windows_pytest_build_system
|
||||
needs: []
|
||||
tags: [windows-build, brew]
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- test_build_system
|
||||
expire_in: 2 days
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
when: always
|
||||
variables:
|
||||
MINIMAL_CMAKE_VERSION: "3.22.1"
|
||||
script:
|
||||
- .\install.ps1 --enable-ci
|
||||
- . .\export.ps1
|
||||
- python ${IDF_PATH}\tools\idf_tools.py install cmake@${MINIMAL_CMAKE_VERSION}
|
||||
- $Env:PATH = "$Env:USERPROFILE\.espressif\tools\cmake\${MINIMAL_CMAKE_VERSION}\bin;$Env:PATH"
|
||||
- |
|
||||
$actualVersion = (& cmake --version).Split()[2]
|
||||
if ($actualVersion -ne $Env:MINIMAL_CMAKE_VERSION) {
|
||||
Write-Error "ERROR: Wrong CMake version! Detected: $actualVersion, but expected: $Env:MINIMAL_CMAKE_VERSION"
|
||||
exit 1
|
||||
}
|
||||
- python "${SUBMODULE_FETCH_TOOL}" -s "all"
|
||||
- cd ${IDF_PATH}\tools\test_build_system
|
||||
- idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- pytest -k cmake --junitxml=${CI_PROJECT_DIR}\XUNIT_RESULT.xml --ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
|
||||
pytest_buildv2_system_win:
|
||||
extends:
|
||||
- .test_build_system_template_win
|
||||
- .rules:labels:buildv2
|
||||
parallel: 2
|
||||
needs: []
|
||||
tags: [windows-build, brew]
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- test_build_system
|
||||
expire_in: 2 days
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
when: always
|
||||
script:
|
||||
- .\install.ps1 --enable-ci
|
||||
- . .\export.ps1
|
||||
- python "${SUBMODULE_FETCH_TOOL}" -s "all"
|
||||
- cd ${IDF_PATH}\tools\test_build_system
|
||||
- idf-ci gitlab download-known-failure-cases-file ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
- pytest
|
||||
--buildv2
|
||||
--parallel-count ${CI_NODE_TOTAL}
|
||||
--parallel-index ${CI_NODE_INDEX}
|
||||
--junitxml=${CI_PROJECT_DIR}\XUNIT_RESULT.xml
|
||||
--ignore-result-files ${KNOWN_FAILURE_CASES_FILE_NAME}
|
||||
--
|
||||
test_non_default_target.py
|
||||
test_component_manager.py
|
||||
test_build.py
|
||||
test_bootloader.py
|
||||
test_git.py
|
||||
test_kconfig.py
|
||||
test_partition.py
|
||||
test_reproducible_build.py
|
||||
test_sdkconfig.py
|
||||
test_versions.py
|
||||
test_common.py
|
||||
test_components.py
|
||||
test_cmake.py
|
||||
test_idf_extension.py
|
||||
test_rebuild.py
|
||||
@@ -10,18 +10,18 @@ upload-pip-cache:
|
||||
extends:
|
||||
- .upload_cache_template
|
||||
- .before_script:minimal
|
||||
- .rules:upload-python-cache
|
||||
- .rules:patterns:python-cache
|
||||
tags:
|
||||
- $GEO
|
||||
- cache
|
||||
cache:
|
||||
key: pip-cache-${LATEST_GIT_TAG}
|
||||
key: pip-cache
|
||||
paths:
|
||||
- .cache/pip
|
||||
policy: push
|
||||
script:
|
||||
- rm -rf .cache/pip # clear old packages
|
||||
- bash install.sh --enable-ci --enable-test-specific
|
||||
- bash install.sh --enable-ci --enable-pytest
|
||||
parallel:
|
||||
matrix:
|
||||
- GEO: [ 'shiny', 'brew' ]
|
||||
@@ -30,12 +30,12 @@ upload-submodules-cache:
|
||||
extends:
|
||||
- .upload_cache_template
|
||||
- .before_script:minimal
|
||||
- .rules:upload-submodule-cache
|
||||
- .rules:patterns:submodule
|
||||
tags:
|
||||
- $GEO
|
||||
- cache
|
||||
cache:
|
||||
key: submodule-cache-${LATEST_GIT_TAG}
|
||||
key: submodule-cache
|
||||
paths:
|
||||
- .cache/submodule_archives
|
||||
policy: push
|
||||
@@ -48,3 +48,27 @@ upload-submodules-cache:
|
||||
parallel:
|
||||
matrix:
|
||||
- GEO: [ 'shiny', 'brew' ]
|
||||
|
||||
upload-danger-npm-cache:
|
||||
stage: upload_cache
|
||||
image: node:18.15.0-alpine3.16
|
||||
extends:
|
||||
- .rules:patterns:dangerjs
|
||||
tags:
|
||||
- $GEO
|
||||
- cache
|
||||
cache:
|
||||
key:
|
||||
files:
|
||||
- .gitlab/dangerjs/package-lock.json
|
||||
paths:
|
||||
- .gitlab/dangerjs/node_modules/
|
||||
policy: push
|
||||
before_script:
|
||||
- echo "Skip before scripts ...."
|
||||
script:
|
||||
- cd .gitlab/dangerjs
|
||||
- npm install --no-progress --no-update-notifier
|
||||
parallel:
|
||||
matrix:
|
||||
- GEO: [ 'shiny', 'brew' ]
|
||||
|
||||
172
.gitlab/dangerjs/aiGenerateGitMessage.js
Normal file
172
.gitlab/dangerjs/aiGenerateGitMessage.js
Normal file
@@ -0,0 +1,172 @@
|
||||
const {
|
||||
minimumSummaryChars,
|
||||
maximumSummaryChars,
|
||||
maximumBodyLineChars,
|
||||
allowedTypes,
|
||||
} = require("./mrCommitsConstants.js");
|
||||
const { gptStandardModelTokens } = require("./mrCommitsConstants.js");
|
||||
|
||||
const { ChatPromptTemplate } = require("langchain/prompts");
|
||||
const { SystemMessagePromptTemplate } = require("langchain/prompts");
|
||||
const { LLMChain } = require("langchain/chains");
|
||||
const { ChatOpenAI } = require("langchain/chat_models/openai");
|
||||
const openAiTokenCount = require("openai-gpt-token-counter");
|
||||
|
||||
module.exports = async function () {
|
||||
let outputDangerMessage = `\n\nPerhaps you could use an AI-generated suggestion for your commit message. Here is one `;
|
||||
|
||||
let mrDiff = await getMrGitDiff(danger.git.modified_files);
|
||||
const mrCommitMessages = getCommitMessages(danger.gitlab.commits);
|
||||
const inputPrompt = getInputPrompt();
|
||||
const inputLlmTokens = getInputLlmTokens(
|
||||
inputPrompt,
|
||||
mrDiff,
|
||||
mrCommitMessages
|
||||
);
|
||||
console.log(`Input tokens for LLM: ${inputLlmTokens}`);
|
||||
|
||||
if (inputLlmTokens >= gptStandardModelTokens) {
|
||||
mrDiff = ""; // If the input mrDiff is larger than 16k model, don't use mrDiff, use only current commit messages
|
||||
outputDangerMessage += `(based only on your current commit messages, git-diff of this MR is too big (${inputLlmTokens} tokens) for the AI models):\n\n`;
|
||||
} else {
|
||||
outputDangerMessage += `(based on your MR git-diff and your current commit messages):\n\n`;
|
||||
}
|
||||
|
||||
// Generate AI commit message
|
||||
let generatedCommitMessage = "";
|
||||
try {
|
||||
const rawCommitMessage = await createAiGitMessage(
|
||||
inputPrompt,
|
||||
mrDiff,
|
||||
mrCommitMessages
|
||||
);
|
||||
generatedCommitMessage = postProcessCommitMessage(rawCommitMessage);
|
||||
} catch (error) {
|
||||
console.error("Error in generating AI commit message: ", error);
|
||||
outputDangerMessage +=
|
||||
"\nCould not generate commit message due to an error.\n";
|
||||
}
|
||||
|
||||
// Append closing statements ("Closes https://github.com/espressif/esp-idf/issues/XXX") to the generated commit message
|
||||
let closingStatements = extractClosingStatements(mrCommitMessages);
|
||||
if (closingStatements.length > 0) {
|
||||
generatedCommitMessage += "\n\n" + closingStatements;
|
||||
}
|
||||
|
||||
// Add the generated git message, format to the markdown code block
|
||||
outputDangerMessage += `\n\`\`\`\n${generatedCommitMessage}\n\`\`\`\n`;
|
||||
outputDangerMessage +=
|
||||
"\n**NOTE: AI-generated suggestions may not always be correct, please review the suggestion before using it.**"; // Add disclaimer
|
||||
return outputDangerMessage;
|
||||
};
|
||||
|
||||
async function getMrGitDiff(mrModifiedFiles) {
|
||||
const fileDiffs = await Promise.all(
|
||||
mrModifiedFiles.map((file) => danger.git.diffForFile(file))
|
||||
);
|
||||
return fileDiffs.map((fileDiff) => fileDiff.diff.trim()).join(" ");
|
||||
}
|
||||
|
||||
function getCommitMessages(mrCommits) {
|
||||
return mrCommits.map((commit) => commit.message);
|
||||
}
|
||||
|
||||
function getInputPrompt() {
|
||||
return `You are a helpful assistant that creates suggestions for single git commit message, that user can use to describe all the changes in their merge request.
|
||||
Use git diff: {mrDiff} and users current commit messages: {mrCommitMessages} to get the changes made in the commit.
|
||||
|
||||
Output should be git commit message following the conventional commit format.
|
||||
|
||||
Output only git commit message in desired format, without comments and other text.
|
||||
|
||||
Do not include the closing statements ("Closes https://....") in the output.
|
||||
|
||||
Here are the strict rules you must follow:
|
||||
|
||||
- Avoid mentioning any JIRA tickets (e.g., "Closes JIRA-123").
|
||||
- Be specific. Don't use vague terms (e.g., "some checks", "add new ones", "few changes").
|
||||
- The commit message structure should be: <type><(scope/component)>: <summary>
|
||||
- Types allowed: ${allowedTypes.join(", ")}
|
||||
- If 'scope/component' is used, it must start with a lowercase letter.
|
||||
- The 'summary' must NOT end with a period.
|
||||
- The 'summary' must be between ${minimumSummaryChars} and ${maximumSummaryChars} characters long.
|
||||
|
||||
If a 'body' of commit message is used:
|
||||
|
||||
- Each line must be no longer than ${maximumBodyLineChars} characters.
|
||||
- It must be separated from the 'summary' by a blank line.
|
||||
|
||||
Examples of correct commit messages:
|
||||
|
||||
- With scope and body:
|
||||
fix(freertos): Fix startup timeout issue
|
||||
|
||||
This is a text of commit message body...
|
||||
- adds support for wifi6
|
||||
- adds validations for logging script
|
||||
|
||||
- Without scope and body:
|
||||
ci: added target test job for ESP32-Wifi6`;
|
||||
}
|
||||
|
||||
function getInputLlmTokens(inputPrompt, mrDiff, mrCommitMessages) {
|
||||
const mrCommitMessagesTokens = openAiTokenCount(mrCommitMessages.join(" "));
|
||||
const gitDiffTokens = openAiTokenCount(mrDiff);
|
||||
const promptTokens = openAiTokenCount(inputPrompt);
|
||||
return mrCommitMessagesTokens + gitDiffTokens + promptTokens;
|
||||
}
|
||||
|
||||
async function createAiGitMessage(inputPrompt, mrDiff, mrCommitMessages) {
|
||||
const chat = new ChatOpenAI({ engine: "gpt-3.5-turbo", temperature: 0 });
|
||||
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
|
||||
SystemMessagePromptTemplate.fromTemplate(inputPrompt),
|
||||
]);
|
||||
const chain = new LLMChain({ prompt: chatPrompt, llm: chat });
|
||||
|
||||
const response = await chain.call({
|
||||
mrDiff: mrDiff,
|
||||
mrCommitMessages: mrCommitMessages,
|
||||
});
|
||||
return response.text;
|
||||
}
|
||||
|
||||
function postProcessCommitMessage(rawCommitMessage) {
|
||||
// Split the result into lines
|
||||
let lines = rawCommitMessage.split("\n");
|
||||
|
||||
// Format each line
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let line = lines[i].trim();
|
||||
|
||||
// If the line is longer than maximumBodyLineChars, split it into multiple lines
|
||||
if (line.length > maximumBodyLineChars) {
|
||||
let newLines = [];
|
||||
while (line.length > maximumBodyLineChars) {
|
||||
let lastSpaceIndex = line.lastIndexOf(
|
||||
" ",
|
||||
maximumBodyLineChars
|
||||
);
|
||||
newLines.push(line.substring(0, lastSpaceIndex));
|
||||
line = line.substring(lastSpaceIndex + 1);
|
||||
}
|
||||
newLines.push(line);
|
||||
lines[i] = newLines.join("\n");
|
||||
}
|
||||
}
|
||||
|
||||
// Join the lines back into a single string with a newline between each one
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
function extractClosingStatements(mrCommitMessages) {
|
||||
let closingStatements = [];
|
||||
mrCommitMessages.forEach((message) => {
|
||||
const lines = message.split("\n");
|
||||
lines.forEach((line) => {
|
||||
if (line.startsWith("Closes")) {
|
||||
closingStatements.push(line);
|
||||
}
|
||||
});
|
||||
});
|
||||
return closingStatements.join("\n");
|
||||
}
|
||||
56
.gitlab/dangerjs/configParameters.js
Normal file
56
.gitlab/dangerjs/configParameters.js
Normal file
@@ -0,0 +1,56 @@
|
||||
// Accumulated { message, status } records, one per executed rule.
let outputStatuses = [];

/**
 * Records the exit status of a DangerJS rule so it can later be printed by
 * `displayAllOutputStatuses()`. Recording the same rule twice updates the
 * stored status in place instead of adding a duplicate entry.
 * @param message The name of the rule
 * @param status The output (exit) status of the rule
 */
function recordRuleExitStatus(message, status) {
    const existingIndex = outputStatuses.findIndex(
        (rule) => rule.message === message
    );

    if (existingIndex === -1) {
        // First record for this rule
        outputStatuses.push({ message, status });
    } else {
        // Rule already recorded; overwrite its status
        outputStatuses[existingIndex].status = status;
    }
}
|
||||
|
||||
/**
 * Prints every recorded rule status as an aligned table: the rule name,
 * dot padding up to a fixed column width, then the status. Records are
 * sorted alphabetically (in place) by rule name before printing.
 * These statuses are later displayed in CI job tracelog.
 */
function displayAllOutputStatuses() {
    const lineLength = 100;

    // Alphabetical order by rule name; Array.prototype.sort mutates in place,
    // matching the original behavior.
    outputStatuses.sort((first, second) =>
        first.message.localeCompare(second.message)
    );

    const formattedLines = outputStatuses.map(({ message, status }) => {
        // Dot-fill so the status column lines up at `lineLength`; a rule whose
        // name + status exceed the width simply gets no padding.
        const dotCount = Math.max(0, lineLength - message.length - status.length);
        return `${message}${".".repeat(dotCount)} ${status}`;
    });

    console.log(
        "DangerJS checks (rules) output states:\n" + "=".repeat(lineLength + 2)
    );
    console.log(formattedLines.join("\n"));
    console.log("=".repeat(lineLength + 2));
}
|
||||
|
||||
// Public API of this helper module; consumed by dangerfile.js (summary
// printing) and by each individual check module (status recording).
module.exports = {
    displayAllOutputStatuses,
    recordRuleExitStatus,
};
|
||||
51
.gitlab/dangerjs/dangerfile.js
Normal file
51
.gitlab/dangerjs/dangerfile.js
Normal file
@@ -0,0 +1,51 @@
|
||||
const { displayAllOutputStatuses } = require("./configParameters.js");
|
||||
|
||||
/*
|
||||
* Modules with checks are stored in ".gitlab/dangerjs/<module_name>". To import them, use path relative to "dangerfile.js"
|
||||
*/
|
||||
/**
 * Runs every DangerJS check module for this MR in a fixed order, prints the
 * per-rule status summary, and posts a success message when no check produced
 * any fail/warn/info output.
 *
 * NOTE(review): `message` and `results` are DangerJS DSL globals injected at
 * runtime — confirm `results` is available in the Danger version in use.
 */
async function runChecks() {
    // Checks for merge request title
    require("./mrTitleNoDraftOrWip.js")();

    // Checks for merge request description
    require("./mrDescriptionLongEnough.js")();
    require("./mrDescriptionReleaseNotes.js")();
    await require("./mrDescriptionJiraLinks.js")();

    // Checks for documentation
    await require("./mrDocsTranslation.js")();

    // Checks for MR commits
    require("./mrCommitsTooManyCommits.js")();
    await require("./mrCommitsCommitMessage.js")();
    require("./mrCommitsEmail.js")();

    // Checks for MR code
    require("./mrSizeTooLarge.js")();

    // Checks for MR area labels
    await require("./mrAreaLabels.js")();

    // Checks for Source branch name
    require("./mrSourceBranchName.js")();

    // Show DangerJS individual checks statuses - visible in CI job tracelog
    displayAllOutputStatuses();

    // Add success log if no issues
    if (
        results.fails.length === 0 &&
        results.warnings.length === 0 &&
        results.messages.length === 0
    ) {
        return message("🎉 Good Job! All checks are passing!");
    }
}
|
||||
|
||||
// Kick off all checks. NOTE(review): the returned promise is not awaited
// here; Danger appears to collect pending results itself — confirm.
runChecks();

// Add retry link
// Deep link to the current CI job so reviewers can re-run the DangerJS checks.
const retryLink = `${process.env.DANGER_GITLAB_HOST}/${process.env.CI_PROJECT_PATH}/-/jobs/${process.env.CI_JOB_ID}`;
markdown(
    `***\n#### :repeat: You can enforce automatic MR checks by retrying the [DangerJS job](${retryLink})\n***`
);
|
||||
27
.gitlab/dangerjs/mrAreaLabels.js
Normal file
27
.gitlab/dangerjs/mrAreaLabels.js
Normal file
@@ -0,0 +1,27 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR has area labels (light blue labels)
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = "Merge request area labels";
|
||||
const projectId = 103; // ESP-IDF
|
||||
const areaLabelColor = /^#d2ebfa$/i; // match color code (case-insensitive)
|
||||
const projectLabels = await danger.gitlab.api.Labels.all(projectId); // Get all project labels
|
||||
const areaLabels = projectLabels
|
||||
.filter((label) => areaLabelColor.test(label.color))
|
||||
.map((label) => label.name); // Filter only area labels
|
||||
const mrLabels = danger.gitlab.mr.labels; // Get MR labels
|
||||
|
||||
if (!mrLabels.some((label) => areaLabels.includes(label))) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
`Please add some [area labels](${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/labels) to this MR.`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
165
.gitlab/dangerjs/mrCommitsCommitMessage.js
Normal file
165
.gitlab/dangerjs/mrCommitsCommitMessage.js
Normal file
@@ -0,0 +1,165 @@
|
||||
const {
|
||||
minimumSummaryChars,
|
||||
maximumSummaryChars,
|
||||
maximumBodyLineChars,
|
||||
allowedTypes,
|
||||
} = require("./mrCommitsConstants.js");
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check that commit messages are based on the Espressif ESP-IDF project's rules for git commit messages.
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
// Lints every commit message in the MR with @commitlint against the
// Espressif conventional-commit rules, collects human-readable issues per
// commit, and posts one compiled warning (optionally with an AI-generated
// message suggestion) when any issue is found.
module.exports = async function () {
    const ruleName = "Commit messages style";
    const mrCommits = danger.gitlab.commits;
    const lint = require("@commitlint/lint").default;

    // Severity 1 = warning for every rule, so linting never hard-fails the job.
    const lintingRules = {
        // rule definition: [(0-1 = off/on), (always/never = must be/mustn't be), (value)]
        "body-max-line-length": [1, "always", maximumBodyLineChars], // Max length of the body line
        "footer-leading-blank": [1, "always"], // Always have a blank line before the footer section
        "footer-max-line-length": [1, "always", maximumBodyLineChars], // Max length of the footer line
        "subject-max-length": [1, "always", maximumSummaryChars], // Max length of the "Summary"
        "subject-min-length": [1, "always", minimumSummaryChars], // Min length of the "Summary"
        "scope-case": [1, "always", "lower-case"], // "scope/component" must start with lower-case
        "subject-full-stop": [1, "never", "."], // "Summary" must not end with a full stop (period)
        "subject-empty": [1, "never"], // "Summary" is mandatory
        "type-case": [1, "always", "lower-case"], // "type/action" must start with lower-case
        "type-empty": [1, "never"], // "type/action" is mandatory
        "type-enum": [1, "always", allowedTypes], // "type/action" must be one of the allowed types
        "body-leading-blank": [1, "always"], // Always have a blank line before the body section
    };

    // Switcher for AI suggestions (for poor messages)
    let generateAISuggestion = false;

    // Search for the messages in each commit
    let issuesAllCommitMessages = [];

    for (const commit of mrCommits) {
        const commitMessage = commit.message;
        const commitMessageTitle = commit.title;

        let issuesSingleCommitMessage = [];
        let reportSingleCommitMessage = "";

        // Check if the commit message contains any Jira ticket references
        const jiraTicketRegex = /[A-Z0-9]+-[0-9]+/g;
        const jiraTicketMatches = commitMessage.match(jiraTicketRegex);
        if (jiraTicketMatches) {
            const jiraTicketNames = jiraTicketMatches.join(", ");
            issuesSingleCommitMessage.push(
                `- probably contains Jira ticket reference (\`${jiraTicketNames}\`). Please remove Jira tickets from commit messages.`
            );
        }

        // Lint commit messages with @commitlint (Conventional Commits style)
        const result = await lint(commit.message, lintingRules);

        for (const warning of result.warnings) {
            // Custom messages for each rule with terminology used by Espressif conventional commits guide
            switch (warning.name) {
                case "subject-max-length":
                    issuesSingleCommitMessage.push(
                        `- *summary* appears to be too long`
                    );
                    break;
                case "type-empty":
                    issuesSingleCommitMessage.push(
                        `- *type/action* looks empty`
                    );
                    break;
                case "type-case":
                    issuesSingleCommitMessage.push(
                        `- *type/action* should start with a lowercase letter`
                    );

                    break;
                case "scope-empty":
                    issuesSingleCommitMessage.push(
                        `- *scope/component* looks empty`
                    );
                    break;
                case "scope-case":
                    issuesSingleCommitMessage.push(
                        `- *scope/component* should be lowercase without whitespace, allowed special characters are \`_\` \`/\` \`.\` \`,\` \`*\` \`-\` \`.\``
                    );
                    break;
                case "subject-empty":
                    issuesSingleCommitMessage.push(`- *summary* looks empty`);
                    generateAISuggestion = true;
                    break;
                case "subject-min-length":
                    issuesSingleCommitMessage.push(
                        `- *summary* looks too short`
                    );
                    generateAISuggestion = true;
                    break;
                case "subject-case":
                    issuesSingleCommitMessage.push(
                        `- *summary* should start with a capital letter`
                    );
                    break;
                case "subject-full-stop":
                    issuesSingleCommitMessage.push(
                        `- *summary* should not end with a period (full stop)`
                    );
                    break;
                case "type-enum":
                    issuesSingleCommitMessage.push(
                        `- *type/action* should be one of [${allowedTypes
                            .map((type) => `\`${type}\``)
                            .join(", ")}]`
                    );
                    break;

                default:
                    // Fall back to commitlint's own wording for rules we have no custom text for
                    issuesSingleCommitMessage.push(`- ${warning.message}`);
            }
        }

        if (issuesSingleCommitMessage.length) {
            reportSingleCommitMessage = `- the commit message \`"${commitMessageTitle}"\`:\n${issuesSingleCommitMessage
                .map((message) => `  ${message}`) // Indent each issue by 2 spaces
                .join("\n")}`;
            issuesAllCommitMessages.push(reportSingleCommitMessage);
        }
    }

    // Create report
    if (issuesAllCommitMessages.length) {
        issuesAllCommitMessages.sort();
        const basicTips = [
            `- correct format of commit message should be: \`<type/action>(<scope/component>): <summary>\`, for example \`fix(esp32): Fixed startup timeout issue\``,
            `- allowed types are: \`${allowedTypes}\``,
            `- sufficiently descriptive message summary should be between ${minimumSummaryChars} to ${maximumSummaryChars} characters and start with upper case letter`,
            `- avoid Jira references in commit messages (unavailable/irrelevant for our customers)`,
            `- follow this [commit messages guide](${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/wikis/dev-proc/Commit-messages)`,
        ];
        let dangerMessage = `\n**Some issues found for the commit messages in this MR:**\n${issuesAllCommitMessages.join(
            "\n"
        )}
        \n***
        \n**Please consider updating these commit messages** - here are some basic tips:\n${basicTips.join(
            "\n"
        )}
        \n \`TIP:\` You can install commit-msg pre-commit hook (\`pre-commit install -t pre-commit -t commit-msg\`) to run this check when committing.
        \n***
        `;

        if (generateAISuggestion) {
            // Create AI generated suggestion for git commit message based of gitDiff and current commit messages
            const AImessageSuggestion =
                await require("./aiGenerateGitMessage.js")();
            dangerMessage += AImessageSuggestion;
        }

        recordRuleExitStatus(ruleName, "Failed");
        return warn(dangerMessage);
    }

    // At this point, the rule has passed
    recordRuleExitStatus(ruleName, "Passed");
};
|
||||
16
.gitlab/dangerjs/mrCommitsConstants.js
Normal file
16
.gitlab/dangerjs/mrCommitsConstants.js
Normal file
@@ -0,0 +1,16 @@
|
||||
// Shared constants for the commit-message checks (mrCommitsCommitMessage.js
// consumes the char limits and allowedTypes).
module.exports = {
    gptStandardModelTokens: 4096, // presumably the GPT model's context-window size used by the AI suggestion module — TODO confirm
    minimumSummaryChars: 20, // commit "summary" shorter than this is flagged (subject-min-length)
    maximumSummaryChars: 72, // commit "summary" longer than this is flagged (subject-max-length)
    maximumBodyLineChars: 100, // max length of commit body/footer lines (body/footer-max-line-length)
    // Allowed "type/action" prefixes of a commit message (conventional-commits style)
    allowedTypes: [
        "change",
        "ci",
        "docs",
        "feat",
        "fix",
        "refactor",
        "remove",
        "revert",
    ],
};
|
||||
23
.gitlab/dangerjs/mrCommitsEmail.js
Normal file
23
.gitlab/dangerjs/mrCommitsEmail.js
Normal file
@@ -0,0 +1,23 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if the author is accidentally making a commit using a personal email
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = 'Commits from outside Espressif';
|
||||
const mrCommitAuthorEmails = danger.gitlab.commits.map(commit => commit.author_email);
|
||||
const mrCommitCommitterEmails = danger.gitlab.commits.map(commit => commit.committer_email);
|
||||
const emailPattern = /.*@espressif\.com/;
|
||||
const filteredEmails = [...mrCommitAuthorEmails, ...mrCommitCommitterEmails].filter((email) => !emailPattern.test(email));
|
||||
if (filteredEmails.length) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return message(
|
||||
`Some of the commits were authored or committed by developers outside Espressif: ${filteredEmails.join(', ')}. Please check if this is expected.`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
};
|
||||
22
.gitlab/dangerjs/mrCommitsTooManyCommits.js
Normal file
22
.gitlab/dangerjs/mrCommitsTooManyCommits.js
Normal file
@@ -0,0 +1,22 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR has not an excessive numbers of commits (if squashed)
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = 'Number of commits in merge request';
|
||||
const tooManyCommitThreshold = 2; // above this number of commits, squash commits is suggested
|
||||
const mrCommits = danger.gitlab.commits;
|
||||
|
||||
if (mrCommits.length > tooManyCommitThreshold) {
|
||||
recordRuleExitStatus(ruleName, "Passed (with suggestions)");
|
||||
return message(
|
||||
`You might consider squashing your ${mrCommits.length} commits (simplifying branch history).`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
};
|
||||
238
.gitlab/dangerjs/mrDescriptionJiraLinks.js
Normal file
238
.gitlab/dangerjs/mrDescriptionJiraLinks.js
Normal file
@@ -0,0 +1,238 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/** Check that there are valid JIRA links in MR description.
|
||||
*
|
||||
* This check extracts the "Related" section from the MR description and
|
||||
* searches for JIRA ticket references in the format "Closes [JIRA ticket key]".
|
||||
*
|
||||
* It then extracts the closing GitHub links from the corresponding JIRA tickets and
|
||||
* checks if the linked GitHub issues are still in open state.
|
||||
*
|
||||
* Finally, it checks if the required GitHub closing links are present in the MR's commit messages.
|
||||
*
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = 'Jira ticket references';
|
||||
const axios = require("axios");
|
||||
const mrDescription = danger.gitlab.mr.description;
|
||||
const mrCommitMessages = danger.gitlab.commits.map(
|
||||
(commit) => commit.message
|
||||
);
|
||||
const jiraTicketRegex = /[A-Z0-9]+-[0-9]+/;
|
||||
|
||||
let partMessages = []; // Create a blank field for future records of individual issues
|
||||
|
||||
// Parse section "Related" from MR Description
|
||||
const sectionRelated = extractSectionRelated(mrDescription);
|
||||
|
||||
if (
|
||||
!sectionRelated.header || // No section Related in MR description or ...
|
||||
!jiraTicketRegex.test(sectionRelated.content) // no Jira links in section Related
|
||||
) {
|
||||
recordRuleExitStatus(ruleName, 'Passed (with suggestions)');
|
||||
return message(
|
||||
"Please consider adding references to JIRA issues in the `Related` section of the MR description."
|
||||
);
|
||||
}
|
||||
|
||||
// Get closing (only) JIRA tickets
|
||||
const jiraTickets = findClosingJiraTickets(sectionRelated.content);
|
||||
|
||||
for (const ticket of jiraTickets) {
|
||||
ticket.jiraUIUrl = `https://jira.espressif.com:8443/browse/${ticket.ticketName}`;
|
||||
|
||||
if (!ticket.correctFormat) {
|
||||
partMessages.push(
|
||||
`- closing ticket \`${ticket.record}\` seems to be in the wrong format (or inaccessible to Jira DangerBot).. The correct format is for example \`- Closes JIRA-123\`.`
|
||||
);
|
||||
}
|
||||
|
||||
// Get closing GitHub issue links from JIRA tickets
|
||||
const closingGithubLink = await getGitHubClosingLink(ticket.ticketName);
|
||||
if (closingGithubLink) {
|
||||
ticket.closingGithubLink = closingGithubLink;
|
||||
} else if (closingGithubLink === null) {
|
||||
partMessages.push(
|
||||
`- the Jira issue number [\`${ticket.ticketName}\`](${ticket.jiraUIUrl}) seems to be invalid (please check if the ticket number is correct)`
|
||||
);
|
||||
continue; // Handle unreachable JIRA tickets; skip the following checks
|
||||
} else {
|
||||
continue; // Jira ticket have no GitHub closing link; skip the following checks
|
||||
}
|
||||
|
||||
// Get still open GitHub issues
|
||||
const githubIssueStatusOpen = await isGithubIssueOpen(
|
||||
ticket.closingGithubLink
|
||||
);
|
||||
ticket.isOpen = githubIssueStatusOpen;
|
||||
if (githubIssueStatusOpen === null) {
|
||||
// Handle unreachable GitHub issues
|
||||
partMessages.push(
|
||||
`- the GitHub issue [\`${ticket.closingGithubLink}\`](${ticket.closingGithubLink}) does not seem to exist on GitHub (referenced from JIRA ticket [\`${ticket.ticketName}\`](${ticket.jiraUIUrl}) )`
|
||||
);
|
||||
continue; // skip the following checks
|
||||
}
|
||||
|
||||
// Search in commit message if there are all GitHub closing links (from Related section) for still open GH issues
|
||||
if (ticket.isOpen) {
|
||||
if (
|
||||
!mrCommitMessages.some((item) =>
|
||||
item.includes(`Closes ${ticket.closingGithubLink}`)
|
||||
)
|
||||
) {
|
||||
partMessages.push(
|
||||
`- please add \`Closes ${ticket.closingGithubLink}\` to the commit message`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create report / DangerJS check feedback if issues with Jira links found
|
||||
if (partMessages.length) {
|
||||
createReport();
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
|
||||
// ---------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* This function takes in a string mrDescription which contains a Markdown-formatted text
|
||||
* related to a Merge Request (MR) in a GitLab repository. It searches for a section titled "Related"
|
||||
* and extracts the content of that section. If the section is not found, it returns an object
|
||||
* indicating that the header and content are null. If the section is found but empty, it returns
|
||||
* an object indicating that the header is present but the content is null. If the section is found
|
||||
* with content, it returns an object indicating that the header is present and the content of the
|
||||
* "Related" section.
|
||||
*
|
||||
* @param {string} mrDescription - The Markdown-formatted text related to the Merge Request.
|
||||
* @returns {{
|
||||
* header: string | boolean | null,
|
||||
* content: string | null
|
||||
* }} - An object containing the header and content of the "Related" section, if present.
|
||||
*/
|
||||
|
||||
function extractSectionRelated(mrDescription) {
|
||||
const regexSectionRelated = /## Related([\s\S]*?)(?=## |$)/;
|
||||
const sectionRelated = mrDescription.match(regexSectionRelated);
|
||||
if (!sectionRelated) {
|
||||
return { header: null, content: null }; // Section "Related" is missing
|
||||
}
|
||||
|
||||
const content = sectionRelated[1].replace(/(\r\n|\n|\r)/gm, ""); // Remove empty lines
|
||||
if (!content.length) {
|
||||
return { header: true, content: null }; // Section "Related" is present, but empty
|
||||
}
|
||||
|
||||
return { header: true, content: sectionRelated[1] }; // Found section "Related" with content
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds all JIRA tickets that are being closed in the given sectionRelatedcontent.
|
||||
* The function searches for lines that start with - Closes and have the format Closes [uppercase letters]-[numbers].
|
||||
* @param {string} sectionRelatedcontent - A string that contains lines with mentions of JIRA tickets
|
||||
* @returns {Array} An array of objects with ticketName property that has the correct format
|
||||
*/
|
||||
|
||||
function findClosingJiraTickets(sectionRelatedcontent) {
|
||||
let closingTickets = [];
|
||||
const lines = sectionRelatedcontent.split("\n");
|
||||
for (const line of lines) {
|
||||
if (!line.startsWith("- Closes")) {
|
||||
continue; // Not closing-type ticket, skip
|
||||
}
|
||||
|
||||
const correctJiraClosingLinkFormat = new RegExp(
|
||||
`^- Closes ${jiraTicketRegex.source}$`
|
||||
);
|
||||
const matchedJiraTicket = line.match(jiraTicketRegex);
|
||||
if (matchedJiraTicket) {
|
||||
if (!correctJiraClosingLinkFormat.test(line)) {
|
||||
closingTickets.push({
|
||||
record: line,
|
||||
ticketName: matchedJiraTicket[0],
|
||||
correctFormat: false,
|
||||
});
|
||||
} else {
|
||||
closingTickets.push({
|
||||
record: line,
|
||||
ticketName: matchedJiraTicket[0],
|
||||
correctFormat: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
return closingTickets;
|
||||
}
|
||||
|
||||
/**
|
||||
* This function takes a JIRA issue key and retrieves the description from JIRA's API.
|
||||
* It then searches the description for a GitHub closing link in the format "Closes https://github.com/owner/repo/issues/123".
|
||||
* If a GitHub closing link is found, it is returned. If no GitHub closing link is found, it returns null.
|
||||
* @param {string} jiraIssueKey - The key of the JIRA issue to search for the GitHub closing link.
|
||||
* @returns {Promise<string|null>} - A promise that resolves to a string containing the GitHub closing link if found,
|
||||
* or null if not found.
|
||||
*/
|
||||
async function getGitHubClosingLink(jiraIssueKey) {
|
||||
let jiraDescription = "";
|
||||
|
||||
// Get JIRA ticket description content
|
||||
try {
|
||||
const response = await axios({
|
||||
url: `https://jira.espressif.com:8443/rest/api/latest/issue/${jiraIssueKey}`,
|
||||
auth: {
|
||||
username: process.env.DANGER_JIRA_USER,
|
||||
password: process.env.DANGER_JIRA_PASSWORD,
|
||||
},
|
||||
});
|
||||
jiraDescription = response.data.fields.description
|
||||
? response.data.fields.description
|
||||
: ""; // if the Jira ticket has an unfilled Description, the ".description" property is missing in API response - in that case set "jiraDescription" to an empty string
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Find GitHub closing link in description
|
||||
const regexClosingGhLink =
|
||||
/Closes\s+(https:\/\/github.com\/\S+\/\S+\/issues\/\d+)/;
|
||||
const closingGithubLink = jiraDescription.match(regexClosingGhLink);
|
||||
|
||||
if (closingGithubLink) {
|
||||
return closingGithubLink[1];
|
||||
} else {
|
||||
return false; // Jira issue has no GitHub closing link in description
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a GitHub issue linked in a merge request is still open.
|
||||
*
|
||||
* @param {string} link - The link to the GitHub issue.
|
||||
* @returns {Promise<boolean>} A promise that resolves to a boolean indicating if the issue is open.
|
||||
* @throws {Error} If the link is invalid or if there was an error fetching the issue.
|
||||
*/
|
||||
async function isGithubIssueOpen(link) {
|
||||
const parsedUrl = new URL(link);
|
||||
const [owner, repo] = parsedUrl.pathname.split("/").slice(1, 3);
|
||||
const issueNumber = parsedUrl.pathname.split("/").slice(-1)[0];
|
||||
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}`
|
||||
);
|
||||
return response.data.state === "open"; // return True if GitHub issue is open
|
||||
} catch (error) {
|
||||
return null; // GET request to issue fails
|
||||
}
|
||||
}
|
||||
|
||||
function createReport() {
|
||||
partMessages.sort();
|
||||
let dangerMessage = `Some issues found for the related JIRA tickets in this MR:\n${partMessages.join(
|
||||
"\n"
|
||||
)}`;
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(dangerMessage);
|
||||
}
|
||||
};
|
||||
24
.gitlab/dangerjs/mrDescriptionLongEnough.js
Normal file
24
.gitlab/dangerjs/mrDescriptionLongEnough.js
Normal file
@@ -0,0 +1,24 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR Description has accurate description".
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = "Merge request sufficient description";
|
||||
const mrDescription = danger.gitlab.mr.description;
|
||||
const descriptionChunk = mrDescription.match(/^([^#]*)/)[1].trim(); // Extract all text before the first section header (i.e., the text before the "## Release notes")
|
||||
|
||||
const shortMrDescriptionThreshold = 50; // Description is considered too short below this number of characters
|
||||
|
||||
if (descriptionChunk.length < shortMrDescriptionThreshold) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
"The MR description looks very brief, please check if more details can be added."
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
103
.gitlab/dangerjs/mrDescriptionReleaseNotes.js
Normal file
103
.gitlab/dangerjs/mrDescriptionReleaseNotes.js
Normal file
@@ -0,0 +1,103 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR Description contains mandatory section "Release notes"
|
||||
*
|
||||
* Extracts the content of the "Release notes" section from the GitLab merge request description.
|
||||
*
|
||||
* @dangerjs WARN (if section missing, is empty or wrong markdown format)
|
||||
*/
|
||||
// Validates the mandatory "## Release notes" section of the MR description:
// the section must exist and contain either at least one valid bullet entry
// (checked by check_entry) or a "No release notes" comment - but not both.
module.exports = function () {
    const ruleName = 'Merge request Release Notes section';
    const mrDescription = danger.gitlab.mr.description;
    const wiki_link = `${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/wikis/rfc/How-to-write-release-notes-properly`;

    // Section body = everything between "## Release notes" and the next "## " header (or end)
    const regexSectionReleaseNotes = /## Release notes([\s\S]*?)(?=## |$)/;
    const regexValidEntry = /^\s*[-*+]\s+.+/; // a markdown bullet with content
    const regexNoReleaseNotes = /no release note/i;

    const sectionReleaseNotes = mrDescription.match(regexSectionReleaseNotes);
    if (!sectionReleaseNotes) {
        recordRuleExitStatus(ruleName, "Failed");
        return warn(`The \`Release Notes\` section seems to be missing. Please check if the section header in MR description is present and in the correct markdown format ("## Release Notes").\n\nSee [Release Notes Format Rules](${wiki_link}).`);
    }

    // Strip HTML comments (the MR template's instructions) before parsing
    const releaseNotesLines = sectionReleaseNotes[1].replace(/<!--[\s\S]*?-->/g, '')

    const lines = releaseNotesLines.split("\n").filter(s => s.trim().length > 0);
    let valid_entries_found = 0;
    let no_release_notes_found = false;
    let violations = [];

    // Classify each non-empty line: bullet entry (validated) or "No release notes" comment
    lines.forEach((line) => {
        if (line.match(regexValidEntry)) {
            valid_entries_found++;
            const error_msg = check_entry(line);
            if (error_msg) {
                violations.push(error_msg);
            }
        } else if (line.match(regexNoReleaseNotes)) {
            no_release_notes_found = true;
        }
    });

    let error_output = [];
    if (violations.length > 0) {
        error_output = [...error_output, 'Invalid release note entries:', violations.join('\n')];
    }
    // Entries and the "No release notes" comment are mutually exclusive
    if (no_release_notes_found) {
        if (valid_entries_found > 0) {
            error_output.push('`No release notes` comment shows up when there is valid entry. Remove bullets before comments in release notes section.');
        }
    } else {
        if (!valid_entries_found) {
            error_output.push('The `Release Notes` section seems to have no valid entries. Add bullets before valid entries, or add `No release notes` comment to suppress this error if you mean to have no release notes.');
        }
    }

    if (error_output.length > 0) {
        // Paragraphs joined by double `\n`s.
        error_output = [...error_output, `See [Release Notes Format Guide](${wiki_link}).`].join('\n\n');
        recordRuleExitStatus(ruleName, "Failed");
        return warn(error_output);
    }

    // At this point, the rule has passed
    recordRuleExitStatus(ruleName, 'Passed');
};
|
||||
|
||||
/**
 * Validates a single release-note bullet entry.
 *
 * Expected shape: `- [area] Description starting with capital/digit`,
 * flush with the left margin.
 *
 * @param entry One bullet line from the "Release notes" section
 * @returns A formatted violation report for this entry, or null when valid
 */
function check_entry(entry) {
    const entryHeader = `- \`${entry}\``;
    const indent = " ";

    // A "No release notes" comment must not be written as a bullet entry.
    if (/no\s+release\s+note/i.test(entry)) {
        return [entryHeader, `${indent}- \`No release notes\` comment shouldn't start with bullet.`].join('\n');
    }

    // Remove a leading escaping backslash of the special characters, https://www.markdownguide.org/basic-syntax/#characters-you-can-escape
    const unescapedEntry = entry.replace(/\\([\\`*_{}[\]<>()+#-.!|])/g, '$1');

    // Capture: (1) leading whitespace, (2) [area], (3) description text
    const entryFormat = /^(\s*)[-*+]\s+\[([^\]]+)\]\s+(.*)$/;
    const parsed = entryFormat.exec(unescapedEntry);
    if (parsed === null) {
        return [entryHeader, `${indent}- Please specify the [area] to which the change belongs (see guide). If this line is just a comment, remove the bullet.`].join('\n');
    }

    const leadingWhitespace = parsed[1]; // parsed[2] is the [area]
    const description = parsed[3].trim();
    const problems = [];

    if (leadingWhitespace) {
        problems.push(`${indent}- Release note entry should start from the beginning of line. (Nested release note not allowed.)`);
    }

    if (!/^[A-Z0-9]/.test(description)) {
        problems.push(`${indent}- Release note statement should start with a capital letter or digit.`);
    }

    return problems.length > 0 ? [entryHeader, ...problems].join('\n') : null;
}
|
||||
280
.gitlab/dangerjs/mrDocsTranslation.js
Normal file
280
.gitlab/dangerjs/mrDocsTranslation.js
Normal file
@@ -0,0 +1,280 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check the documentation files in this MR.
|
||||
*
|
||||
* Generate an object with all docs/ files found in this MR with paths to their EN/CN versions.
|
||||
*
|
||||
* For common files (both language versions exist in this MR), compare the lines of both files.
|
||||
* Ignore if the CN file is only a single line file with an "include" reference to the EN version.
|
||||
*
|
||||
* For files that only have a CN version in this MR, add a message to the message that an EN file also needs to be created.
|
||||
*
|
||||
* For a file that only has an EN version in this MR, try loading its CN version from the target Gitlab branch.
|
||||
* If its CN version doesn't exist in the repository or it does exist,
|
||||
* but its contents are larger than just an "include" link to the EN version (it's a full-size file),
|
||||
* add a message to the report
|
||||
*
|
||||
* Create a compiled report with the docs/ files issues found and set its severity (WARN/INFO).
|
||||
* Severity is based on the presence of "needs translation: ??" labels in this MR
|
||||
*
|
||||
* @dangerjs WARN (if docs translation issues in the MR)
|
||||
* @dangerjs INFO (if docs translation issues in the MR and the user has already added translation labels).
|
||||
* Adding translation labels "needs translation: XX" automatically notifies the Documentation team
|
||||
*
|
||||
* @dangerjs WARN (if there are no docs issues in MR, but translation labels have been added anyway)
|
||||
*
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = 'Documentation translation';
|
||||
let partMessages = []; // Create a blank field for future records of individual issues
|
||||
const pathProject = "espressif/esp-idf";
|
||||
const regexIncludeLink = /\.\.\sinclude::\s((\.\.\/)+)en\//;
|
||||
const allMrFiles = [
|
||||
...danger.git.modified_files,
|
||||
...danger.git.created_files,
|
||||
...danger.git.deleted_files,
|
||||
];
|
||||
|
||||
const docsFilesMR = parseMrDocsFiles(allMrFiles); // Create single object of all doc files in MR with names, paths and groups
|
||||
|
||||
// Both versions (EN and CN) of document found changed in this MR
|
||||
for (const file of docsFilesMR.bothFilesInMr) {
|
||||
file.contentEn = await getContentFileInMR(file.fileEnPath); // Get content of English file
|
||||
file.linesEn = file.contentEn.split("\n").length; // Get number of lines of English file
|
||||
|
||||
file.contentCn = await getContentFileInMR(file.fileCnPath); // Get content of Chinese file
|
||||
file.linesCn = file.contentCn.split("\n").length; // Get number of lines of English file
|
||||
|
||||
// Compare number of lines in both versions
|
||||
if (file.linesEn !== file.linesCn) {
|
||||
// Check if CN file is only link to EN file
|
||||
if (!regexIncludeLink.test(file.contentCn)) {
|
||||
// if not just a link ...
|
||||
partMessages.push(
|
||||
`- please synchronize the EN and CN version of \`${file.fileName}\`. [\`${file.fileEnPath}\`](${file.fileUrlRepoEN}) has ${file.linesEn} lines; [\`${file.fileCnPath}\`](${file.fileUrlRepoCN}) has ${file.linesCn} lines.`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only Chinese version of document found changed in this MR
|
||||
for (const file of docsFilesMR.onlyCnFilesInMr) {
|
||||
partMessages.push(
|
||||
`- file \`${file.fileEnPath}\` doesn't exist in this MR or in the GitLab repo. Please add \`${file.fileEnPath}\` into this MR.`
|
||||
);
|
||||
}
|
||||
|
||||
// Only English version of document found in this MR
|
||||
for (const file of docsFilesMR.onlyEnFilesInMr) {
|
||||
const targetBranch = danger.gitlab.mr.target_branch;
|
||||
file.contentCn = await getContentFileInGitlab(
|
||||
file.fileCnPath,
|
||||
targetBranch
|
||||
); // Try to fetch CN file from target branch of Gitlab repository and store content
|
||||
|
||||
if (file.contentCn) {
|
||||
// File found on target branch in Gitlab repository
|
||||
if (!regexIncludeLink.test(file.contentCn)) {
|
||||
// File on Gitlab master is NOT just an ..include:: link to ENG version
|
||||
file.fileUrlRepoMasterCN = `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${targetBranch}/${file.fileCnPath}`;
|
||||
partMessages.push(
|
||||
`- file \`${file.fileCnPath}\` was not updated in this MR, but found unchanged full document (not just link to EN) in target branch of Gitlab repository [\`${file.fileCnPath}\`](${file.fileUrlRepoMasterCN}). Please update \`${file.fileCnPath}\` into this MR.`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// File failed to fetch, probably does not exist in the target branch
|
||||
partMessages.push(
|
||||
`- file \`${file.fileCnPath}\` probably doesn't exist in this MR or in the GitLab repo. Please add \`${file.fileCnPath}\` into this MR.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Create a report with found issues with documents in MR
|
||||
createReport();
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
|
||||
/**
|
||||
* Generates an object that represents the relationships between files in two different languages found in this MR.
|
||||
*
|
||||
* @param {string[]} docsFilesEN - An array of file paths for documents in English.
|
||||
* @param {string[]} docsFilesCN - An array of file paths for documents in Chinese.
|
||||
* @returns {Object} An object with the following properties:
|
||||
* - bothFilesInMr: An array of objects that represent files that found in MR in both languages. Each object has the following properties:
|
||||
* - fileName: The name of the file.
|
||||
* - fileEnPath: The path to the file in English.
|
||||
* - fileCnPath: The path to the file in Chinese.
|
||||
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
|
||||
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
|
||||
* - onlyCnFilesInMr: An array of objects that represent files that only found in MR in English. Each object has the following properties:
|
||||
* - fileName: The name of the file.
|
||||
* - fileEnPath: The path to the file in English.
|
||||
* - fileCnPath: The FUTURE path to the file in Chinese.
|
||||
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
|
||||
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
|
||||
* - onlyEnFilesInMr: An array of objects that represent files that only found in MR in Chinese. Each object has the following properties:
|
||||
* - fileName: The name of the file.
|
||||
* - fileEnPath: The FUTURE path to the file in English.
|
||||
* - fileCnPath: The path to the file in Chinese.
|
||||
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
|
||||
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
|
||||
*/
|
||||
function parseMrDocsFiles(allMrFiles) {
|
||||
const path = require("path");
|
||||
const mrBranch = danger.gitlab.mr.source_branch;
|
||||
|
||||
const docsEnFilesMrPath = allMrFiles.filter((file) =>
|
||||
file.startsWith("docs/en")
|
||||
); // Filter all English doc files in MR
|
||||
const docsCnFilesMrPath = allMrFiles.filter((file) =>
|
||||
file.startsWith("docs/zh_CN")
|
||||
); // Filter all Chinese doc files in MR
|
||||
|
||||
const docsEnFileNames = docsEnFilesMrPath.map((filePath) =>
|
||||
path.basename(filePath)
|
||||
); // Get (base) file names for English docs
|
||||
const docsCnFileNames = docsCnFilesMrPath.map((filePath) =>
|
||||
path.basename(filePath)
|
||||
); // Get (base) file names for Chinese docs
|
||||
|
||||
const bothFileNames = docsEnFileNames.filter((fileName) =>
|
||||
docsCnFileNames.includes(fileName)
|
||||
); // Get file names that are common to both English and Chinese docs
|
||||
const onlyEnFileNames = docsEnFileNames.filter(
|
||||
(fileName) => !docsCnFileNames.includes(fileName)
|
||||
); // Get file names that are only present in English version
|
||||
const onlyCnFileNames = docsCnFileNames.filter(
|
||||
(fileName) => !docsEnFileNames.includes(fileName)
|
||||
); // Get file names that are only present in Chinese version
|
||||
|
||||
return {
|
||||
bothFilesInMr: bothFileNames.map((fileName) => {
|
||||
const fileEnPath =
|
||||
docsEnFilesMrPath[docsEnFileNames.indexOf(fileName)];
|
||||
const fileCnPath =
|
||||
docsCnFilesMrPath[docsCnFileNames.indexOf(fileName)];
|
||||
|
||||
return {
|
||||
fileName,
|
||||
fileEnPath,
|
||||
fileCnPath,
|
||||
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
|
||||
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
|
||||
};
|
||||
}),
|
||||
onlyEnFilesInMr: onlyEnFileNames.map((fileName) => {
|
||||
const fileEnPath =
|
||||
docsEnFilesMrPath[docsEnFileNames.indexOf(fileName)];
|
||||
const fileCnPath = fileEnPath.replace("en", "zh_CN"); // Generate future CN file path
|
||||
|
||||
return {
|
||||
fileName,
|
||||
fileEnPath,
|
||||
fileCnPath,
|
||||
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
|
||||
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
|
||||
};
|
||||
}),
|
||||
onlyCnFilesInMr: onlyCnFileNames.map((fileName) => {
|
||||
const fileCnPath =
|
||||
docsCnFilesMrPath[docsCnFileNames.indexOf(fileName)];
|
||||
const fileEnPath = fileCnPath.replace("zh_CN", "en"); // Generate future EN file path
|
||||
|
||||
return {
|
||||
fileName,
|
||||
fileEnPath,
|
||||
fileCnPath,
|
||||
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
|
||||
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
|
||||
};
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the contents of a file from GitLab using the GitLab API.
|
||||
*
|
||||
* @param {string} filePath - The path of the file to retrieve.
|
||||
* @param {string} branch - The branch where the file is located.
|
||||
* @returns {string|null} - The contents of the file, with any trailing new lines trimmed, or null if the file cannot be retrieved.
|
||||
*/
|
||||
async function getContentFileInGitlab(filePath, branch) {
|
||||
const axios = require("axios");
|
||||
|
||||
const encFilePath = encodeURIComponent(filePath);
|
||||
const encBranch = encodeURIComponent(branch);
|
||||
const urlApi = `${process.env.DANGER_GITLAB_API_BASE_URL}/projects/${danger.gitlab.mr.project_id}/repository/files/${encFilePath}/raw?ref=${encBranch}`;
|
||||
|
||||
try {
|
||||
const response = await axios.get(urlApi, {
|
||||
headers: {
|
||||
"Private-Token": process.env.DANGER_GITLAB_API_TOKEN,
|
||||
},
|
||||
});
|
||||
return response.data.trim(); // Trim trailing new line
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the contents of a file in a DangerJS merge request object.
|
||||
*
|
||||
* @param {string} filePath - The path of the file to retrieve.
|
||||
* @returns {string|null} - The contents of the file, with any trailing new lines trimmed, or null if the file cannot be retrieved.
|
||||
*/
|
||||
async function getContentFileInMR(filePath) {
|
||||
try {
|
||||
const content = await danger.git.diffForFile(filePath);
|
||||
const fileContentAfter = content.after.trim(); // Trim trailing new lines
|
||||
return fileContentAfter;
|
||||
} catch (error) {
|
||||
console.error(`Error while getting file content MR: ${error}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a compiled report for found documentation issues in the current MR and alerts the Documentation team if there are any "needs translation" labels present.
|
||||
*
|
||||
* Report if documentation labels have been added by mistake.
|
||||
*/
|
||||
function createReport() {
|
||||
const mrLabels = danger.gitlab.mr.labels; // Get MR labels
|
||||
const regexTranslationLabel = /needs translation:/i;
|
||||
|
||||
const translationLabelsPresent = mrLabels.some((label) =>
|
||||
regexTranslationLabel.test(label)
|
||||
); // Check if any of MR labels are "needs translation: XX"
|
||||
|
||||
// No docs issues found in MR, but translation labels have been added anyway
|
||||
if (!partMessages.length && translationLabelsPresent) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
`Please remove the \`needs translation: XX\` labels. For documents that need to translate from scratch, Doc team will translate them in the future. For the current stage, we only focus on updating exiting EN and CN translation to make them in sync.`
|
||||
);
|
||||
}
|
||||
|
||||
// Docs issues found in this MR
|
||||
partMessages.sort();
|
||||
let dangerMessage = `Some of the documentation files in this MR seem to have translations issues:\n${partMessages.join(
|
||||
"\n"
|
||||
)}\n`;
|
||||
|
||||
if (partMessages.length) {
|
||||
if (!translationLabelsPresent) {
|
||||
dangerMessage += `
|
||||
\nWhen synchronizing the EN and CN versions, please follow the [Documentation Code](https://docs.espressif.com/projects/esp-idf/zh_CN/latest/esp32/contribute/documenting-code.html#standardize-document-format). The total number of lines of EN and CN should be same.\n
|
||||
\nIf you have difficulty in providing translation, you can contact Documentation team by adding <kbd>needs translation: CN</kbd> or <kbd>needs translation: EN</kbd> labels into this MR and retrying Danger CI job. The documentation team will be automatically notified and will help you with the translations before the merge.\n`;
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(dangerMessage); // no "needs translation: XX" labels in MR; report issues as warn
|
||||
} else {
|
||||
dangerMessage += `\nTranslation labels <kbd>needs translation: CN</kbd> or <kbd>needs translation: EN</kbd> were added - this will automatically notify the Documentation team to help you with translation issues.`;
|
||||
recordRuleExitStatus(ruleName, 'Passed (with suggestions)');
|
||||
return message(dangerMessage); // "needs translation: XX" labels were found in MR and Docs team was notified; report issues as info
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
22
.gitlab/dangerjs/mrSizeTooLarge.js
Normal file
22
.gitlab/dangerjs/mrSizeTooLarge.js
Normal file
@@ -0,0 +1,22 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR is too large (more than 1000 lines of changes)
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = "Merge request size (number of changed lines)";
|
||||
const bigMrLinesOfCodeThreshold = 1000;
|
||||
const totalLines = await danger.git.linesOfCode();
|
||||
|
||||
if (totalLines > bigMrLinesOfCodeThreshold) {
|
||||
recordRuleExitStatus(ruleName, "Passed (with suggestions)");
|
||||
return message(
|
||||
`This MR seems to be quite large (total lines of code: ${totalLines}), you might consider splitting it into smaller MRs`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
31
.gitlab/dangerjs/mrSourceBranchName.js
Normal file
31
.gitlab/dangerjs/mrSourceBranchName.js
Normal file
@@ -0,0 +1,31 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Throw Danger WARN if branch name contains more than one slash or uppercase letters
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = "Source branch name";
|
||||
const sourceBranch = danger.gitlab.mr.source_branch;
|
||||
|
||||
// Check if the source branch name contains more than one slash
|
||||
const slashCount = (sourceBranch.match(/\//g) || []).length;
|
||||
if (slashCount > 1) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
`The source branch name \`${sourceBranch}\` contains more than one slash. This can cause troubles with git sync. Please rename the branch.`
|
||||
);
|
||||
}
|
||||
|
||||
// Check if the source branch name contains any uppercase letters
|
||||
if (sourceBranch !== sourceBranch.toLowerCase()) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
`The source branch name \`${sourceBranch}\` contains uppercase letters. This can cause troubles on case-insensitive file systems (macOS). Please use only lowercase letters.`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
31
.gitlab/dangerjs/mrTitleNoDraftOrWip.js
Normal file
31
.gitlab/dangerjs/mrTitleNoDraftOrWip.js
Normal file
@@ -0,0 +1,31 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR Title contains prefix "WIP: ...".
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = 'Merge request not in Draft or WIP state';
|
||||
const mrTitle = danger.gitlab.mr.title;
|
||||
const regexes = [
|
||||
{ prefix: "WIP", regex: /^WIP:/i },
|
||||
{ prefix: "W.I.P", regex: /^W\.I\.P/i },
|
||||
{ prefix: "[WIP]", regex: /^\[WIP/i },
|
||||
{ prefix: "[W.I.P]", regex: /^\[W\.I\.P/i },
|
||||
{ prefix: "(WIP)", regex: /^\(WIP/i },
|
||||
{ prefix: "(W.I.P)", regex: /^\(W\.I\.P/i },
|
||||
];
|
||||
|
||||
for (const item of regexes) {
|
||||
if (item.regex.test(mrTitle)) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
`Please remove the \`${item.prefix}\` prefix from the MR name before merging this MR.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
2745
.gitlab/dangerjs/package-lock.json
generated
Normal file
2745
.gitlab/dangerjs/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
12
.gitlab/dangerjs/package.json
Normal file
12
.gitlab/dangerjs/package.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "dangerjs-esp-idf",
|
||||
"description": "Merge request automatic linter",
|
||||
"main": "dangerfile.js",
|
||||
"dependencies": {
|
||||
"danger": "^11.2.3",
|
||||
"axios": "^1.3.3",
|
||||
"langchain": "^0.0.53",
|
||||
"openai-gpt-token-counter": "^1.0.3",
|
||||
"@commitlint/lint": "^13.1.0"
|
||||
}
|
||||
}
|
||||
@@ -9,19 +9,7 @@
|
||||
## Related <!-- Optional -->
|
||||
<!-- Related Jira issues and Github issues or write "No related issues"-->
|
||||
|
||||
<!-- ## Dynamic Pipeline Configuration
|
||||
```yaml
|
||||
Test Case Filters:
|
||||
# Only run tests that match the given substring expression (modified files/components will be ignored):
|
||||
# Please use a list of strings.
|
||||
# This will run the test cases filtered like `pytest -k "(<list_item_1>) or (<list_item_2>) or ...`
|
||||
# The fast pipeline will fail at the final stage.
|
||||
# For example:
|
||||
- test_sdm and not sdmmc
|
||||
- test_hello_world
|
||||
# This example will include all tests containing 'test_hello_world' in the name,
|
||||
# and include all tests containing 'test_sdm' but not 'sdmmc' in the name.
|
||||
``` --><!-- Optional -->
|
||||
## Release notes <!-- Mandatory -->
|
||||
<!-- Either state release notes or write "No release notes" -->
|
||||
|
||||
<!-- Don't remove the next line - assigns the MR author as the assignee -->
|
||||
/assign me
|
||||
<!-- ## Breaking change notes --><!-- Optional -->
|
||||
|
||||
@@ -10,7 +10,8 @@ _This entire section can be deleted if all items are checked._
|
||||
* [ ] All related links, including JIRA, backport, submodule MR, are mentioned in the `Related` subsection.
|
||||
* [ ] Any GitHub issues are linked inside the git commit message and corresponding release notes
|
||||
* [ ] Add label for the area this MR is part of
|
||||
* [ ] For documentation updates, check if label `needs translation:CN` or `needs translation:EN` have been added when the other language version still needs the update. Don't add such labels if the document updated has never been translated before and requires translation from scratch.
|
||||
* [ ] For documentation updates, check if label `Docs` and `needs translation:CN` or `needs translation:EN` have been added when the other language version still needs the update. Skip adding the label if the document is not yet translated.
|
||||
* [ ] Check if documents requiring translation fall under get-started section. If yes, add the labels mentioned above. Then the documentation team will assign a translator for you. Please inform the translator to prepare translation once your MR is ready to merge. The translation should be included in your MR to get it merged. For more information, see documentation workflow in Wiki.
|
||||
* [ ] Any necessary "needs backport" labels are added
|
||||
* [ ] Check if this is a breaking change. If it is, add notes to the `Breaking change notes` subsection below
|
||||
* [ ] Release note entry if this is a new public feature, or a fix for an issue introduced in the previous release.
|
||||
@@ -39,5 +40,24 @@ _For other small/non-public changes, which are not expected to be in the release
|
||||
* Mention submodule MR, if there is
|
||||
* Mention backport(ed) MR, if there is
|
||||
|
||||
<!-- Don't remove the next line - assigns the MR author as the assignee -->
|
||||
/assign me
|
||||
_Don't touch the subsection titles below, they will be parsed by scripts._
|
||||
|
||||
## Release notes (Mandatory)
|
||||
|
||||
_Changes made in this MR that should go into the **Release Notes** should be listed here. Please use **past tense** and *specify the area (see maintainers page of IDF internal wiki)*. If there is a subscope, include it and separate with slash (`/`). Minor changes can go to the descriptions above without a release notes entry._
|
||||
|
||||
_Write all the changes in a **list** (Start at the beginning of the line with `-` or `*`). If multiple changes are made, each of them should take a single line. If there is only one change to list, it should still be the only line of a list. If this MR does not need any release notes, write "No release notes" here without the `-` or `*`. e.g._
|
||||
|
||||
* [WiFi] Changed/fixed/updated xxx
|
||||
* [WiFi] Added support of xxx
|
||||
* [Peripheral Drivers/I2S] Fixed xxx (https://github.com/espressif/esp-idf/issues/xxxx)
|
||||
|
||||
## Breaking change notes
|
||||
|
||||
_Remove this subsection if not used._
|
||||
|
||||
_If there are any breaking changes, please mention it here. Talking about (1) what is not accepted any more, (2) the alternative solution and (3) the benefits/reason. e.g._
|
||||
|
||||
_Please strictly follow the breaking change restriction, which means, if there is a breaking change but you are merging to non-major versions, you have to separate the breaking part out to another MR for a major version. The breaking change subsection is only accepted in MRs merging to major versions._
|
||||
|
||||
* [VFS/UART] Now vfs_uart_set_rts_cts accept one more instance argument, to support configuration to different ports.
|
||||
|
||||
26
.gitmodules
vendored
26
.gitmodules
vendored
@@ -46,6 +46,16 @@
|
||||
sbom-description = Wear-leveled SPI flash file system for embedded devices
|
||||
sbom-hash = 0dbb3f71c5f6fae3747a9d935372773762baf852
|
||||
|
||||
[submodule "components/json/cJSON"]
|
||||
path = components/json/cJSON
|
||||
url = ../../DaveGamble/cJSON.git
|
||||
sbom-version = 1.7.17
|
||||
sbom-cpe = cpe:2.3:a:cjson_project:cjson:{}:*:*:*:*:*:*:*
|
||||
sbom-supplier = Person: Dave Gamble
|
||||
sbom-url = https://github.com/DaveGamble/cJSON
|
||||
sbom-description = Ultralightweight JSON parser in ANSI C
|
||||
sbom-hash = 87d8f0961a01bf09bef98ff89bae9fdec42181ee
|
||||
|
||||
[submodule "components/mbedtls/mbedtls"]
|
||||
path = components/mbedtls/mbedtls
|
||||
url = ../../espressif/mbedtls.git
|
||||
@@ -54,6 +64,10 @@
|
||||
path = components/lwip/lwip
|
||||
url = ../../espressif/esp-lwip.git
|
||||
|
||||
[submodule "components/mqtt/esp-mqtt"]
|
||||
path = components/mqtt/esp-mqtt
|
||||
url = ../../espressif/esp-mqtt.git
|
||||
|
||||
[submodule "components/protobuf-c/protobuf-c"]
|
||||
path = components/protobuf-c/protobuf-c
|
||||
url = ../../protobuf-c/protobuf-c.git
|
||||
@@ -67,11 +81,11 @@
|
||||
[submodule "components/unity/unity"]
|
||||
path = components/unity/unity
|
||||
url = ../../ThrowTheSwitch/Unity.git
|
||||
sbom-version = v2.6.0-RC1
|
||||
sbom-version = v2.4.3-51-g7d2bf62b7e6a
|
||||
sbom-supplier = Organization: ThrowTheSwitch community <http://www.throwtheswitch.org>
|
||||
sbom-url = https://github.com/ThrowTheSwitch/Unity
|
||||
sbom-description = Simple Unit Testing for C
|
||||
sbom-hash = bf560290f6020737eafaa8b5cbd2177c3956c03f
|
||||
sbom-hash = 7d2bf62b7e6afaf38153041a9d53c21aeeca9a25
|
||||
|
||||
[submodule "components/bt/host/nimble/nimble"]
|
||||
path = components/bt/host/nimble/nimble
|
||||
@@ -106,6 +120,10 @@
|
||||
path = components/openthread/lib
|
||||
url = ../../espressif/esp-thread-lib.git
|
||||
|
||||
[submodule "components/ieee802154/lib"]
|
||||
path = components/ieee802154/lib
|
||||
url = ../../espressif/esp-ieee802154-lib.git
|
||||
|
||||
[submodule "components/bt/controller/lib_esp32h2/esp32h2-bt-lib"]
|
||||
path = components/bt/controller/lib_esp32h2/esp32h2-bt-lib
|
||||
url = ../../espressif/esp32h2-bt-lib.git
|
||||
@@ -118,10 +136,6 @@
|
||||
path = components/bt/controller/lib_esp32c6/esp32c6-bt-lib
|
||||
url = ../../espressif/esp32c6-bt-lib.git
|
||||
|
||||
[submodule "components/bt/controller/lib_esp32c5/esp32c5-bt-lib"]
|
||||
path = components/bt/controller/lib_esp32c5/esp32c5-bt-lib
|
||||
url = ../../espressif/esp32c5-bt-lib.git
|
||||
|
||||
[submodule "components/heap/tlsf"]
|
||||
path = components/heap/tlsf
|
||||
url = ../../espressif/tlsf.git
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
config_rules = [
|
||||
'sdkconfig.ci=default',
|
||||
'sdkconfig.ci.*=',
|
||||
'=default',
|
||||
]
|
||||
|
||||
extra_pythonpaths = [
|
||||
'$IDF_PATH/tools/ci/python_packages',
|
||||
'$IDF_PATH/tools/ci',
|
||||
'$IDF_PATH/tools',
|
||||
]
|
||||
build_system = "idf_ci_local.app:IdfCMakeApp"
|
||||
|
||||
recursive = true
|
||||
check_warnings = true
|
||||
keep_going = true
|
||||
copy_sdkconfig = true
|
||||
ignore_warning_files = [
|
||||
'$IDF_PATH/tools/ci/ignore_build_warnings.txt',
|
||||
]
|
||||
|
||||
build_dir = "build_@t_@w"
|
||||
build_log_filename = "build_log.txt"
|
||||
size_json_filename = "size_${CI_JOB_ID}.json"
|
||||
|
||||
verbose = 1 # INFO
|
||||
|
||||
additional_build_targets = [
|
||||
'esp32h21',
|
||||
'esp32h4',
|
||||
]
|
||||
|
||||
# collect
|
||||
collect_app_info_filename = "app_info_${CI_JOB_NAME_SLUG}.txt"
|
||||
junitxml = "build_summary_${CI_JOB_NAME_SLUG}.xml"
|
||||
|
||||
# manifest
|
||||
check_manifest_rules = true
|
||||
manifest_rootpath = "$IDF_PATH"
|
||||
manifest_filepatterns = [
|
||||
'**/.build-test-rules.yml',
|
||||
]
|
||||
|
||||
# dependency-driven build
|
||||
deactivate_dependency_driven_build_by_components = [
|
||||
'cxx',
|
||||
'esp_common',
|
||||
'esp_hw_support',
|
||||
'esp_rom',
|
||||
'esp_system',
|
||||
'esp_timer',
|
||||
'freertos',
|
||||
'hal',
|
||||
'heap',
|
||||
'log',
|
||||
'esp_libc',
|
||||
'riscv',
|
||||
'soc',
|
||||
'xtensa',
|
||||
]
|
||||
|
||||
deactivate_dependency_driven_build_by_filepatterns = [
|
||||
# tools
|
||||
'tools/cmake/**/*',
|
||||
'tools/tools.json',
|
||||
# ci
|
||||
'tools/ci/ignore_build_warnings.txt',
|
||||
]
|
||||
106
.idf_ci.toml
106
.idf_ci.toml
@@ -1,106 +0,0 @@
|
||||
preserve_non_test_related_apps = false
|
||||
exclude_dirs = [
|
||||
'tools/test_mkdfu',
|
||||
'tools/test_idf_size',
|
||||
'tools/test_idf_py',
|
||||
'tools/test_idf_diag',
|
||||
'tools/test_bsasm',
|
||||
'tools/ci/test_autocomplete',
|
||||
'tools/test_build_system',
|
||||
]
|
||||
|
||||
[local_runtime_envs]
|
||||
EXTRA_CFLAGS = "-Werror -Werror=deprecated-declarations -Werror=unused-variable -Werror=unused-but-set-variable -Werror=unused-function -Wstrict-prototypes"
|
||||
EXTRA_CXXFLAGS = "-Werror -Werror=deprecated-declarations -Werror=unused-variable -Werror=unused-but-set-variable -Werror=unused-function"
|
||||
LDGEN_CHECK_MAPPING = "1"
|
||||
IDF_CI_BUILD = "1"
|
||||
|
||||
[gitlab]
|
||||
|
||||
[gitlab.build_pipeline]
|
||||
workflow_name = "build_child_pipeline"
|
||||
presigned_json_job_name = 'generate_pytest_build_report'
|
||||
|
||||
job_tags = ['build', 'shiny']
|
||||
job_template_name = '.dynamic_build_template'
|
||||
job_template_jinja = '' # write in tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
|
||||
pre_yaml_jinja = """
|
||||
include:
|
||||
- .gitlab/ci/common.yml
|
||||
- tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
|
||||
- tools/ci/dynamic_pipelines/templates/test_child_pipeline.yml
|
||||
"""
|
||||
yaml_jinja = """
|
||||
{{ settings.gitlab.build_pipeline.pre_yaml_jinja }}
|
||||
|
||||
workflow:
|
||||
name: {{ settings.gitlab.build_pipeline.workflow_name }}
|
||||
rules:
|
||||
- when: always
|
||||
|
||||
{{ jobs }}
|
||||
""" # simplified since we included the tools/ci/dynamic_pipelines/templates/test_child_pipeline.yml
|
||||
|
||||
[gitlab.test_pipeline]
|
||||
job_template_name = '.dynamic_target_test_template'
|
||||
job_template_jinja = '' # write in tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
|
||||
pre_yaml_jinja = """
|
||||
include:
|
||||
- .gitlab/ci/common.yml
|
||||
- tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml
|
||||
"""
|
||||
|
||||
[gitlab.artifacts.s3.debug]
|
||||
bucket = "idf-artifacts"
|
||||
patterns = [
|
||||
'**/build*/bootloader/*.map',
|
||||
'**/build*/bootloader/*.elf',
|
||||
'**/build*/*.map',
|
||||
'**/build*/*.elf',
|
||||
# customized
|
||||
'**/build*/esp_tee/*.map',
|
||||
'**/build*/esp_tee/*.elf',
|
||||
'**/build*/gdbinit/*',
|
||||
]
|
||||
|
||||
[gitlab.artifacts.s3.flash]
|
||||
bucket = "idf-artifacts"
|
||||
patterns = [
|
||||
'**/build*/bootloader/*.bin',
|
||||
'**/build*/*.bin',
|
||||
'**/build*/partition_table/*.bin',
|
||||
'**/build*/flasher_args.json',
|
||||
'**/build*/flash_project_args',
|
||||
'**/build*/config/sdkconfig.json',
|
||||
'**/build*/sdkconfig',
|
||||
'**/build*/project_description.json',
|
||||
# customized
|
||||
'**/build*/esp_tee/*.bin',
|
||||
]
|
||||
|
||||
[gitlab.artifacts.s3.log]
|
||||
bucket = "idf-artifacts"
|
||||
patterns = [
|
||||
'**/build*/build_log.txt',
|
||||
'**/build*/size*.json',
|
||||
]
|
||||
|
||||
[gitlab.artifacts.s3.junit]
|
||||
bucket = "idf-artifacts"
|
||||
patterns = [
|
||||
'**/XUNIT_RESULT_*.xml',
|
||||
'**/build_summary_*.xml',
|
||||
]
|
||||
|
||||
[gitlab.artifacts.s3.env]
|
||||
bucket = "idf-artifacts"
|
||||
patterns = [
|
||||
'**/pipeline.env',
|
||||
]
|
||||
|
||||
[gitlab.artifacts.s3.longterm]
|
||||
bucket = "longterm"
|
||||
if_clause = '"$CI_COMMIT_REF_NAME" == "master"'
|
||||
patterns = [
|
||||
'**/build*/size*.json',
|
||||
]
|
||||
@@ -1,7 +1,7 @@
|
||||
[mypy]
|
||||
|
||||
# Specifies the Python version used to parse and check the target program
|
||||
python_version = 3.10
|
||||
python_version = 3.9
|
||||
|
||||
# Disallows defining functions without type annotations or with incomplete type annotations
|
||||
# True => enforce type annotation in all function definitions
|
||||
|
||||
@@ -1,17 +1,11 @@
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
|
||||
default_stages: [pre-commit]
|
||||
default_stages: [commit]
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: "v0.9.7"
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
args: [ "--fix", "--show-fixes" ]
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.5.0
|
||||
rev: v4.0.1
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
# note: whitespace exclusions use multiline regex, see https://pre-commit.com/#regular-expressions
|
||||
@@ -20,7 +14,6 @@ repos:
|
||||
# 2 - any file matching *test*/*expected* (for host tests, if possible use this naming pattern always)
|
||||
# 3 - any directory named 'testdata'
|
||||
# 4 - protobuf auto-generated files
|
||||
# 5 - COPYING files
|
||||
exclude: &whitespace_excludes |
|
||||
(?x)^(
|
||||
.+\.(md|rst|map|bin)|
|
||||
@@ -29,11 +22,7 @@ repos:
|
||||
.*_pb2.py|
|
||||
.*.pb-c.h|
|
||||
.*.pb-c.c|
|
||||
.*.yuv|
|
||||
.*.rgb|
|
||||
.*.gray|
|
||||
.*COPYING.*|
|
||||
docs/sphinx-known-warnings\.txt
|
||||
.*.yuv
|
||||
)$
|
||||
- id: end-of-file-fixer
|
||||
exclude: *whitespace_excludes
|
||||
@@ -47,10 +36,20 @@ repos:
|
||||
- id: no-commit-to-branch
|
||||
name: Do not use uppercase letters in the branch name
|
||||
args: ['--pattern', '^[^A-Z]*[A-Z]']
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: v2.3.0
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 5.0.4
|
||||
hooks:
|
||||
- id: codespell
|
||||
- id: flake8
|
||||
args: ['--config=.flake8', '--tee', '--benchmark']
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: 5.12.0 # python 3.8 compatible
|
||||
hooks:
|
||||
- id: isort
|
||||
name: isort (python)
|
||||
exclude: >
|
||||
(?x)^(
|
||||
.*_pb2.py
|
||||
)$
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: check-executables
|
||||
@@ -65,6 +64,18 @@ repos:
|
||||
language: python
|
||||
pass_filenames: false
|
||||
always_run: true
|
||||
- id: check-kconfigs
|
||||
name: Validate Kconfig files
|
||||
entry: tools/ci/check_kconfigs.py
|
||||
language: python
|
||||
additional_dependencies:
|
||||
- esp-idf-kconfig
|
||||
files: '^Kconfig$|Kconfig.*$'
|
||||
- id: check-deprecated-kconfigs-options
|
||||
name: Check if any Kconfig Options Deprecated
|
||||
entry: tools/ci/check_deprecated_kconfigs.py
|
||||
language: python
|
||||
files: 'sdkconfig\.ci$|sdkconfig\.rename$|sdkconfig.*$'
|
||||
- id: cmake-lint
|
||||
name: Check CMake Files Format
|
||||
entry: cmakelint --linelength=120 --spaces=4 --filter=-whitespace/indent
|
||||
@@ -80,24 +91,30 @@ repos:
|
||||
always_run: true
|
||||
files: '\.gitlab/CODEOWNERS'
|
||||
pass_filenames: false
|
||||
- id: check-rules-yml
|
||||
name: Check rules.yml all rules have at lease one job applied, all rules needed exist
|
||||
entry: tools/ci/check_rules_yml.py
|
||||
language: python
|
||||
files: '\.gitlab/ci/.+\.yml|\.gitlab-ci.yml|\.gitmodules'
|
||||
pass_filenames: false
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
- id: check-generated-rules
|
||||
name: Check rules are generated (based on .gitlab/ci/dependencies/dependencies.yml)
|
||||
entry: tools/ci/generate_rules.py
|
||||
language: python
|
||||
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml|.gitlab-ci.yml'
|
||||
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml'
|
||||
pass_filenames: false
|
||||
require_serial: true
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
- id: mypy-check
|
||||
name: Check type annotations in python files
|
||||
entry: tools/ci/check_type_comments.py
|
||||
additional_dependencies:
|
||||
- 'mypy'
|
||||
- 'mypy-extensions'
|
||||
- 'types-setuptools'
|
||||
- 'types-PyYAML'
|
||||
- 'types-requests'
|
||||
- 'mypy==0.940'
|
||||
- 'mypy-extensions==0.4.3'
|
||||
- 'types-setuptools==57.4.14'
|
||||
- 'types-PyYAML==0.1.9'
|
||||
exclude: >
|
||||
(?x)^(
|
||||
.*_pb2.py
|
||||
@@ -144,52 +161,42 @@ repos:
|
||||
require_serial: true
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
- idf-build-apps~=2.13
|
||||
- id: sort-yaml-files
|
||||
name: sort yaml files
|
||||
entry: tools/ci/sort_yaml.py
|
||||
- idf_build_apps~=1.0
|
||||
- id: sort-build-test-rules-ymls
|
||||
name: sort .build-test-rules.yml files
|
||||
entry: tools/ci/check_build_test_rules.py sort-yaml
|
||||
language: python
|
||||
files: '\.build-test-rules\.yml$|known_generate_test_child_pipeline_warnings\.yml$'
|
||||
files: '\.build-test-rules\.yml'
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
- ruamel.yaml
|
||||
- id: sort-yaml-test
|
||||
name: sort yaml test
|
||||
entry: python -m unittest tools/ci/sort_yaml.py
|
||||
- id: check-build-test-rules-path-exists
|
||||
name: check path in .build-test-rules.yml exists
|
||||
entry: tools/ci/check_build_test_rules.py check-exist
|
||||
language: python
|
||||
files: 'tools/ci/sort_yaml\.py$'
|
||||
additional_dependencies:
|
||||
- ruamel.yaml
|
||||
- PyYAML == 5.3.1
|
||||
always_run: true
|
||||
pass_filenames: false
|
||||
require_serial: true
|
||||
- id: cleanup-ignore-lists
|
||||
name: Remove non-existing patterns from ignore lists
|
||||
entry: tools/ci/cleanup_ignore_lists.py
|
||||
language: python
|
||||
always_run: true
|
||||
require_serial: true
|
||||
- id: gitlab-yaml-linter
|
||||
name: Check gitlab yaml files
|
||||
entry: tools/ci/gitlab_yaml_linter.py
|
||||
language: python
|
||||
files: '\.gitlab-ci\.yml|\.gitlab/ci/.+\.yml|\.gitmodules'
|
||||
pass_filenames: false
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.0.1
|
||||
hooks:
|
||||
- id: file-contents-sorter
|
||||
files: "tools/ci/(\
|
||||
executable-list\\.txt\
|
||||
|mypy_ignore_list\\.txt\
|
||||
|check_copyright_ignore\\.txt\
|
||||
|exclude_check_tools_files\\.txt\
|
||||
)"
|
||||
files: 'tools\/ci\/(executable-list\.txt|mypy_ignore_list\.txt|check_copyright_ignore\.txt)'
|
||||
- repo: https://github.com/espressif/check-copyright/
|
||||
rev: v1.1.1
|
||||
rev: v1.0.3
|
||||
hooks:
|
||||
- id: check-copyright
|
||||
args: ['--ignore', 'tools/ci/check_copyright_ignore.txt', '--config', 'tools/ci/check_copyright_config.yaml']
|
||||
- repo: https://github.com/espressif/conventional-precommit-linter
|
||||
rev: v1.10.0
|
||||
rev: v1.2.1
|
||||
hooks:
|
||||
- id: conventional-precommit-linter
|
||||
stages: [commit-msg]
|
||||
@@ -210,24 +217,8 @@ repos:
|
||||
name: shellcheck dash (export.sh)
|
||||
args: ['--shell', 'dash', '-x']
|
||||
files: 'export.sh'
|
||||
- repo: https://github.com/espressif/esp-pwsh-check
|
||||
rev: v1.0.1
|
||||
hooks:
|
||||
- id: check-powershell-scripts
|
||||
stages: [manual]
|
||||
- repo: https://github.com/espressif/esp-idf-sbom.git
|
||||
rev: v0.13.0
|
||||
hooks:
|
||||
- id: validate-sbom-manifest
|
||||
stages: [post-commit]
|
||||
- repo: https://github.com/sphinx-contrib/sphinx-lint
|
||||
rev: v0.9.1
|
||||
hooks:
|
||||
- id: 'sphinx-lint'
|
||||
name: Lint rST files in docs folder using Sphinx Lint
|
||||
files: ^(docs/en|docs/zh_CN)/.*\.(rst|inc)$
|
||||
- repo: https://github.com/espressif/esp-idf-kconfig.git
|
||||
rev: v3.2.0
|
||||
hooks:
|
||||
- id: check-kconfig-files
|
||||
- id: check-deprecated-kconfig-options
|
||||
|
||||
641
.pylintrc
Normal file
641
.pylintrc
Normal file
@@ -0,0 +1,641 @@
|
||||
[MAIN]
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
|
||||
# in a server-like mode.
|
||||
clear-cache-post-run=no
|
||||
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\\' represents the directory delimiter on Windows systems,
|
||||
# it can't be used as an escape character.
|
||||
ignore-paths=
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
jobs=1
|
||||
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
# complex, nested conditions.
|
||||
limit-inference-results=100
|
||||
|
||||
# List of plugins (as comma separated values of python module names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.8
|
||||
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# Add paths to the list of the source roots. Supports globbing patterns. The
|
||||
# source root is an absolute path or a path relative to the current working
|
||||
# directory used to determine a package namespace for modules located under the
|
||||
# source root.
|
||||
source-roots=
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names.
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma.
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Bad variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be refused
|
||||
bad-names-rgxs=
|
||||
|
||||
# Naming style matching correct class attribute names.
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names.
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=i,
|
||||
j,
|
||||
k,
|
||||
ex,
|
||||
Run,
|
||||
_
|
||||
|
||||
# Good variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be accepted
|
||||
good-names-rgxs=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name.
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names.
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type alias names. If left empty, type
|
||||
# alias names will be checked with the set naming style.
|
||||
#typealias-rgx=
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
asyncSetUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# List of regular expressions of class ancestor names to ignore when counting
|
||||
# public methods (see R0903)
|
||||
exclude-too-few-public-methods=
|
||||
|
||||
# List of qualified class names to ignore when counting class parents (see
|
||||
# R0901)
|
||||
ignored-parents=
|
||||
|
||||
# Maximum number of arguments for function / method.
|
||||
max-args=5
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Maximum number of boolean expressions in an if statement (see R0916).
|
||||
max-bool-expr=5
|
||||
|
||||
# Maximum number of branch for function / method body.
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of locals for function / method body.
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of return / yield for function / method body.
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of statements in function / method body.
|
||||
max-statements=50
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=builtins.BaseException,builtins.Exception
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=160
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow explicit reexports by alias from a package __init__.
|
||||
allow-reexport-from-package=no
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then re-enable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
missing-function-docstring, # Modified since here, include this line
|
||||
missing-class-docstring,
|
||||
missing-module-docstring,
|
||||
wrong-import-order,
|
||||
invalid-name,
|
||||
too-few-public-methods,
|
||||
too-many-locals,
|
||||
ungrouped-imports, # since we have isort in pre-commit
|
||||
no-name-in-module, # since we have flake8 to check this
|
||||
too-many-instance-attributes,
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[METHOD_ARGS]
|
||||
|
||||
# List of qualified names (i.e., library.method) which require a timeout
|
||||
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
notes-rgx=
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit,argparse.parse_error
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||
# 'convention', and 'info' which contain the number of messages in each
|
||||
# category, as well as 'statement' which is the total number of statements
|
||||
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
#output-format=
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Comments are removed from the similarity computation
|
||||
ignore-comments=yes
|
||||
|
||||
# Docstrings are removed from the similarity computation
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Imports are removed from the similarity computation
|
||||
ignore-imports=yes
|
||||
|
||||
# Signatures are removed from the similarity computation
|
||||
ignore-signatures=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. No available dictionaries : You need to install
|
||||
# both the python package and the system dependency for enchant to work..
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should be considered directives if they
|
||||
# appear at the beginning of a comment and should not be checked.
|
||||
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of symbolic message names to ignore for Mixin members.
|
||||
ignored-checks-for-mixins=no-member,
|
||||
not-async-context-manager,
|
||||
not-context-manager,
|
||||
attribute-defined-outside-init
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# Regex pattern to define which classes are considered mixins.
|
||||
mixin-class-rgx=.*[Mm]ixin
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of names allowed to shadow builtins
|
||||
allowed-redefined-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
21
.readthedocs.yml
Normal file
21
.readthedocs.yml
Normal file
@@ -0,0 +1,21 @@
|
||||
# .readthedocs.yml
|
||||
# Read the Docs configuration file
|
||||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
|
||||
# Required
|
||||
version: 2
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
formats:
|
||||
- pdf
|
||||
|
||||
# Optionally set the version of Python and requirements required to build your docs
|
||||
python:
|
||||
version: 2.7
|
||||
install:
|
||||
- requirements: docs/requirements.txt
|
||||
|
||||
# We need to list all the submodules included in documenation build by DOxygen
|
||||
submodules:
|
||||
include:
|
||||
- components/mqtt/esp-mqtt
|
||||
226
CMakeLists.txt
226
CMakeLists.txt
@@ -1,63 +1,24 @@
|
||||
cmake_minimum_required(VERSION 3.22)
|
||||
cmake_minimum_required(VERSION 3.16)
|
||||
project(esp-idf C CXX ASM)
|
||||
|
||||
if(CMAKE_CURRENT_LIST_DIR STREQUAL CMAKE_SOURCE_DIR)
|
||||
message(FATAL_ERROR "Current directory '${CMAKE_CURRENT_LIST_DIR}' is not buildable. "
|
||||
"Change directories to one of the example projects in '${CMAKE_CURRENT_LIST_DIR}/examples' and try again.")
|
||||
"Change directories to one of the example projects in '${CMAKE_CURRENT_LIST_DIR}/examples' and try "
|
||||
"again.")
|
||||
endif()
|
||||
|
||||
project(esp-idf C CXX ASM)
|
||||
|
||||
# Variables compile_options, c_compile_options, cxx_compile_options, compile_definitions, link_options shall
|
||||
# not be unset as they may already contain flags, set by toolchain-TARGET.cmake files.
|
||||
|
||||
# Add the following build specifications here, since these seem to be dependent
|
||||
# on config values on the root Kconfig.
|
||||
|
||||
if(BOOTLOADER_BUILD)
|
||||
|
||||
if(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_SIZE)
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
list(APPEND compile_options "-Oz")
|
||||
else()
|
||||
list(APPEND compile_options "-Os")
|
||||
endif()
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU")
|
||||
list(APPEND compile_options "-freorder-blocks")
|
||||
if(CONFIG_IDF_TARGET_ARCH_XTENSA)
|
||||
list(APPEND compile_options "-mno-target-align")
|
||||
endif()
|
||||
endif()
|
||||
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_DEBUG)
|
||||
list(APPEND compile_options "-Og")
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND NOT CONFIG_IDF_TARGET_LINUX)
|
||||
list(APPEND compile_options "-fno-shrink-wrap") # Disable shrink-wrapping to reduce binary size
|
||||
endif()
|
||||
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_PERF)
|
||||
list(APPEND compile_options "-O2")
|
||||
endif()
|
||||
|
||||
elseif(ESP_TEE_BUILD)
|
||||
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
list(APPEND compile_options "-Oz")
|
||||
else()
|
||||
list(APPEND compile_options "-Os")
|
||||
list(APPEND compile_options "-freorder-blocks")
|
||||
endif()
|
||||
|
||||
else()
|
||||
if(NOT BOOTLOADER_BUILD)
|
||||
|
||||
if(CONFIG_COMPILER_OPTIMIZATION_SIZE)
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
list(APPEND compile_options "-Oz")
|
||||
else()
|
||||
list(APPEND compile_options "-Os")
|
||||
endif()
|
||||
list(APPEND compile_options "-Os")
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU")
|
||||
list(APPEND compile_options "-freorder-blocks")
|
||||
if(CONFIG_IDF_TARGET_ARCH_XTENSA)
|
||||
list(APPEND compile_options "-mno-target-align")
|
||||
endif()
|
||||
endif()
|
||||
elseif(CONFIG_COMPILER_OPTIMIZATION_DEBUG)
|
||||
list(APPEND compile_options "-Og")
|
||||
@@ -70,6 +31,24 @@ else()
|
||||
list(APPEND compile_options "-O2")
|
||||
endif()
|
||||
|
||||
else() # BOOTLOADER_BUILD
|
||||
|
||||
if(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_SIZE)
|
||||
list(APPEND compile_options "-Os")
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU")
|
||||
list(APPEND compile_options "-freorder-blocks")
|
||||
endif()
|
||||
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_DEBUG)
|
||||
list(APPEND compile_options "-Og")
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND NOT CONFIG_IDF_TARGET_LINUX)
|
||||
list(APPEND compile_options "-fno-shrink-wrap") # Disable shrink-wrapping to reduce binary size
|
||||
endif()
|
||||
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_NONE)
|
||||
list(APPEND compile_options "-O0")
|
||||
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_PERF)
|
||||
list(APPEND compile_options "-O2")
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_CXX_EXCEPTIONS)
|
||||
@@ -109,13 +88,15 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
list(APPEND compile_options "-Wno-char-subscripts")
|
||||
# Clang seems to notice format string issues which GCC doesn't.
|
||||
list(APPEND compile_options "-Wno-format-security")
|
||||
# Logic bug in essl component
|
||||
list(APPEND compile_options "-Wno-tautological-overlap-compare")
|
||||
# Some pointer checks in mDNS component check addresses which can't be NULL
|
||||
list(APPEND compile_options "-Wno-tautological-pointer-compare")
|
||||
# Similar to the above, in tcp_transport
|
||||
list(APPEND compile_options "-Wno-pointer-bool-conversion")
|
||||
# mbedTLS md5.c triggers this warning in md5_test_buf (false positive)
|
||||
list(APPEND compile_options "-Wno-string-concatenation")
|
||||
# multiple cases of implicit conversions between unrelated enum types
|
||||
# multiple cases of implict convertions between unrelated enum types
|
||||
list(APPEND compile_options "-Wno-enum-conversion")
|
||||
# When IRAM_ATTR is specified both in function declaration and definition,
|
||||
# it produces different section names, since section names include __COUNTER__.
|
||||
@@ -140,10 +121,8 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
list(APPEND compile_options "-Wno-c2x-extensions")
|
||||
# warning on xMPU_SETTINGS for esp32s2 has size 0 for C and 1 for C++
|
||||
list(APPEND compile_options "-Wno-extern-c-compat")
|
||||
if(NOT (CONFIG_IDF_TARGET_LINUX AND CMAKE_HOST_SYSTEM_NAME STREQUAL "Darwin"))
|
||||
# warning: implicit truncation from 'int' to a one-bit wide bit-field changes value from 1 to -1
|
||||
list(APPEND compile_options "-Wno-single-bit-bitfield-constant-conversion")
|
||||
endif()
|
||||
# warning: implicit truncation from 'int' to a one-bit wide bit-field changes value from 1 to -1
|
||||
list(APPEND compile_options "-Wno-single-bit-bitfield-constant-conversion")
|
||||
endif()
|
||||
# More warnings may exist in unit tests and example projects.
|
||||
|
||||
@@ -155,14 +134,6 @@ if(CONFIG_COMPILER_OPTIMIZATION_ASSERTIONS_DISABLE)
|
||||
list(APPEND compile_definitions "-DNDEBUG")
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_NO_MERGE_CONSTANTS)
|
||||
list(APPEND compile_options "-fno-merge-constants")
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_ENABLE_TEXT_SECTION_LITERALS)
|
||||
list(APPEND compile_options "-mtext-section-literals")
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_STACK_CHECK_MODE_NORM)
|
||||
list(APPEND compile_options "-fstack-protector")
|
||||
elseif(CONFIG_COMPILER_STACK_CHECK_MODE_STRONG)
|
||||
@@ -175,20 +146,46 @@ if(CONFIG_COMPILER_DUMP_RTL_FILES)
|
||||
list(APPEND compile_options "-fdump-rtl-expand")
|
||||
endif()
|
||||
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_GREATER 15.0)
|
||||
list(APPEND c_compile_options "-fzero-init-padding-bits=all" "-fno-malloc-dce")
|
||||
endif()
|
||||
if(NOT ${CMAKE_C_COMPILER_VERSION} VERSION_LESS 8.0.0)
|
||||
if(CONFIG_COMPILER_HIDE_PATHS_MACROS)
|
||||
list(APPEND compile_options "-fmacro-prefix-map=${CMAKE_SOURCE_DIR}=.")
|
||||
list(APPEND compile_options "-fmacro-prefix-map=${IDF_PATH}=/IDF")
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_CXX_GLIBCXX_CONSTEXPR_COLD_CONSTEXPR)
|
||||
list(APPEND cxx_compile_options "-D_GLIBCXX20_CONSTEXPR=__attribute__((cold)) constexpr")
|
||||
list(APPEND cxx_compile_options "-D_GLIBCXX23_CONSTEXPR=__attribute__((cold)) constexpr")
|
||||
elseif(CONFIG_COMPILER_CXX_GLIBCXX_CONSTEXPR_COLD)
|
||||
list(APPEND cxx_compile_options "-D_GLIBCXX20_CONSTEXPR=__attribute__((cold))")
|
||||
list(APPEND cxx_compile_options "-D_GLIBCXX23_CONSTEXPR=__attribute__((cold))")
|
||||
endif()
|
||||
if(CONFIG_APP_REPRODUCIBLE_BUILD)
|
||||
idf_build_set_property(DEBUG_PREFIX_MAP_GDBINIT "${BUILD_DIR}/prefix_map_gdbinit")
|
||||
|
||||
__generate_prefix_map(prefix_map_compile_options)
|
||||
list(APPEND compile_options ${prefix_map_compile_options})
|
||||
list(APPEND compile_options "-fdebug-prefix-map=${IDF_PATH}=/IDF")
|
||||
list(APPEND compile_options "-fdebug-prefix-map=${PROJECT_DIR}=/IDF_PROJECT")
|
||||
list(APPEND compile_options "-fdebug-prefix-map=${BUILD_DIR}=/IDF_BUILD")
|
||||
|
||||
# component dirs
|
||||
idf_build_get_property(python PYTHON)
|
||||
idf_build_get_property(idf_path IDF_PATH)
|
||||
idf_build_get_property(component_dirs BUILD_COMPONENT_DIRS)
|
||||
|
||||
execute_process(
|
||||
COMMAND ${python}
|
||||
"${idf_path}/tools/generate_debug_prefix_map.py"
|
||||
"${BUILD_DIR}"
|
||||
"${component_dirs}"
|
||||
OUTPUT_VARIABLE result
|
||||
RESULT_VARIABLE ret
|
||||
)
|
||||
if(NOT ret EQUAL 0)
|
||||
message(FATAL_ERROR "This is a bug. Please report to https://github.com/espressif/esp-idf/issues")
|
||||
endif()
|
||||
|
||||
spaces2list(result)
|
||||
list(LENGTH component_dirs length)
|
||||
math(EXPR max_index "${length} - 1")
|
||||
foreach(index RANGE ${max_index})
|
||||
list(GET component_dirs ${index} folder)
|
||||
list(GET result ${index} after)
|
||||
list(APPEND compile_options "-fdebug-prefix-map=${folder}=${after}")
|
||||
endforeach()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_DISABLE_GCC12_WARNINGS)
|
||||
list(APPEND compile_options "-Wno-address"
|
||||
@@ -202,31 +199,10 @@ if(CONFIG_COMPILER_DISABLE_GCC13_WARNINGS)
|
||||
"-Wno-dangling-reference")
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_DISABLE_GCC14_WARNINGS)
|
||||
list(APPEND compile_options "-Wno-calloc-transposed-args")
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_DISABLE_GCC15_WARNINGS)
|
||||
list(APPEND c_compile_options "-Wno-unterminated-string-initialization")
|
||||
list(APPEND c_compile_options "-Wno-header-guard")
|
||||
list(APPEND cxx_compile_options "-Wno-self-move")
|
||||
list(APPEND cxx_compile_options "-Wno-template-body")
|
||||
list(APPEND cxx_compile_options "-Wno-dangling-reference")
|
||||
list(APPEND cxx_compile_options "-Wno-defaulted-function-deleted")
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_DISABLE_DEFAULT_ERRORS)
|
||||
if(NOT CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
idf_build_replace_option_from_property(COMPILE_OPTIONS "-Werror" "-Werror=all")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# GCC-specific options
|
||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU")
|
||||
list(APPEND compile_options "-fstrict-volatile-bitfields")
|
||||
if(CONFIG_COMPILER_STATIC_ANALYZER)
|
||||
list(APPEND compile_options "-fanalyzer")
|
||||
endif()
|
||||
list(APPEND compile_options "-fstrict-volatile-bitfields"
|
||||
)
|
||||
endif()
|
||||
|
||||
if(CONFIG_ESP_SYSTEM_USE_EH_FRAME)
|
||||
@@ -234,54 +210,20 @@ if(CONFIG_ESP_SYSTEM_USE_EH_FRAME)
|
||||
list(APPEND link_options "-Wl,--eh-frame-hdr")
|
||||
endif()
|
||||
|
||||
if(CONFIG_ESP_SYSTEM_USE_FRAME_POINTER)
|
||||
list(APPEND compile_options "-fno-omit-frame-pointer")
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU")
|
||||
list(APPEND compile_options "-mno-omit-leaf-frame-pointer")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
list(APPEND link_options "-fno-lto")
|
||||
|
||||
if(CONFIG_IDF_TARGET_LINUX AND CMAKE_HOST_SYSTEM_NAME STREQUAL "Darwin")
|
||||
# Not all versions of the MacOS linker support the -warn_commons flag.
|
||||
# ld version 1053.12 (and above) have been tested to support it.
|
||||
# Hence, we extract the version string from the linker output
|
||||
# before including the flag.
|
||||
|
||||
# Get the ld version, capturing both stdout and stderr
|
||||
execute_process(
|
||||
COMMAND ${CMAKE_LINKER} -v
|
||||
OUTPUT_VARIABLE LD_VERSION_OUTPUT
|
||||
ERROR_VARIABLE LD_VERSION_ERROR
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_STRIP_TRAILING_WHITESPACE
|
||||
)
|
||||
|
||||
# Combine stdout and stderr
|
||||
set(LD_VERSION_OUTPUT "${LD_VERSION_OUTPUT}\n${LD_VERSION_ERROR}")
|
||||
|
||||
# Extract the version string
|
||||
string(REGEX MATCH "PROJECT:(ld|dyld)-([0-9]+)\\.([0-9]+)" LD_VERSION_MATCH "${LD_VERSION_OUTPUT}")
|
||||
set(LD_VERSION_MAJOR_MINOR "${CMAKE_MATCH_2}.${CMAKE_MATCH_3}")
|
||||
|
||||
message(STATUS "Linker Version: ${LD_VERSION_MAJOR_MINOR}")
|
||||
|
||||
# Compare the version with 1053.12
|
||||
if(LD_VERSION_MAJOR_MINOR VERSION_GREATER_EQUAL "1053.12")
|
||||
list(APPEND link_options "-Wl,-warn_commons")
|
||||
endif()
|
||||
|
||||
list(APPEND link_options "-Wl,-dead_strip")
|
||||
list(APPEND link_options "-Wl,-warn_commons")
|
||||
else()
|
||||
list(APPEND link_options "-Wl,--gc-sections")
|
||||
list(APPEND link_options "-Wl,--warn-common")
|
||||
endif()
|
||||
|
||||
# SMP FreeRTOS user provided minimal idle hook. This allows the user to provide
|
||||
# their own copy of vApplicationPassiveIdleHook()
|
||||
if(CONFIG_FREERTOS_USE_PASSIVE_IDLE_HOOK)
|
||||
list(APPEND link_options "-Wl,--wrap=vApplicationPassiveIdleHook")
|
||||
# their own copy of vApplicationMinimalIdleHook()
|
||||
if(CONFIG_FREERTOS_USE_MINIMAL_IDLE_HOOK)
|
||||
list(APPEND link_options "-Wl,--wrap=vApplicationMinimalIdleHook")
|
||||
endif()
|
||||
|
||||
# Placing jump tables in flash would cause issues with code that required
|
||||
@@ -294,15 +236,22 @@ if(CMAKE_C_COMPILER_ID MATCHES "GNU")
|
||||
endif()
|
||||
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
list(APPEND compile_options "-fno-use-cxa-atexit") # TODO IDF-10934
|
||||
else()
|
||||
list(APPEND cxx_compile_options "-fuse-cxa-atexit")
|
||||
list(APPEND compile_options "-fno-use-cxa-atexit")
|
||||
endif()
|
||||
|
||||
if(COMPILER_RT_LIB_NAME)
|
||||
list(APPEND link_options "-rtlib=${CONFIG_COMPILER_RT_LIB_NAME}")
|
||||
endif()
|
||||
|
||||
# For the transition period from 32-bit time_t to 64-bit time_t,
|
||||
# auto-detect the size of this type and set corresponding variable.
|
||||
include(CheckTypeSize)
|
||||
check_type_size("time_t" TIME_T_SIZE)
|
||||
if(TIME_T_SIZE)
|
||||
idf_build_set_property(TIME_T_SIZE ${TIME_T_SIZE})
|
||||
else()
|
||||
message(FATAL_ERROR "Failed to determine sizeof(time_t)")
|
||||
endif()
|
||||
|
||||
idf_build_set_property(COMPILE_OPTIONS "${compile_options}" APPEND)
|
||||
idf_build_set_property(C_COMPILE_OPTIONS "${c_compile_options}" APPEND)
|
||||
@@ -332,12 +281,3 @@ foreach(component_target ${build_component_targets})
|
||||
endif()
|
||||
set(__idf_component_context 0)
|
||||
endforeach()
|
||||
|
||||
# Run component validation checks after all components have been processed
|
||||
# Only run validation for the main project, not subprojects like bootloader
|
||||
idf_build_get_property(bootloader_build BOOTLOADER_BUILD)
|
||||
idf_build_get_property(esp_tee_build ESP_TEE_BUILD)
|
||||
if(NOT bootloader_build AND NOT esp_tee_build)
|
||||
include("${CMAKE_CURRENT_LIST_DIR}/tools/cmake/component_validation.cmake")
|
||||
__component_validation_run_checks()
|
||||
endif()
|
||||
|
||||
@@ -8,9 +8,9 @@ This document describes the compatibility between ESP-IDF releases and Espressif
|
||||
|
||||
NOTE: This document on release branches may be out-of-date. Check the [Compatibility file on master](https://github.com/espressif/esp-idf/blob/master/COMPATIBILITY.md) for the most accurate information.
|
||||
|
||||
See [Compatibility Advisory for Chip Revision Numbering Scheme](https://www.espressif.com/sites/default/files/advisory_downloads/AR2022-005%20Compatibility%20Advisory%20for%20Chip%20Revision%20Numbering%20%20Scheme.pdf) on the versioning of Espressif SoC revisions.
|
||||
See [Compatibility Advisory for Chip Revision Numbering Scheme](https://www.espressif.com.cn/sites/default/files/advisory_downloads/AR2022-005%20Compatibility%20Advisory%20for%20Chip%20Revision%20Numbering%20%20Scheme.pdf) on the versioning of Espressif SoC revisions.
|
||||
|
||||
You can run `esptool chip-id` to detect the series and revision of an SoC. See [SoC Errata](https://www.espressif.com/en/support/documents/technical-documents?keys=errata) for more on how to distinguish between chip revisions, and the improvements provided by chip revisions. And run `idf.py --version` to know the version of current ESP-IDF.
|
||||
You can run `esptool chip_id` to detect the series and revision of an SoC. See [SoC Errata](https://www.espressif.com.cn/en/support/documents/technical-documents?keys=errata) for more on how to distinguish between chip revisions, and the improvements provided by chip revisions. And run `idf.py --version` to know the version of current ESP-IDF.
|
||||
|
||||
## ESP-IDF Support for Different Chip Revisions
|
||||
|
||||
@@ -52,99 +52,47 @@ Supported since ESP-IDF v4.2.
|
||||
|
||||
| Release branch | Recommended | Required |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v4.2 | v4.2.3+ | v4.2 |
|
||||
| release/v4.3 | v4.3.3+ | v4.3 |
|
||||
| release/v4.4 | v4.4.6+ | v4.4 |
|
||||
| release/v5.0 | v5.0.4+ | v5.0 |
|
||||
| release/v5.1 | v5.1.2+ | v5.1 |
|
||||
| release/v5.2 and above | v5.2+ | v5.2 |
|
||||
| release/v4.2 | v4.2.3 | v4.2.3 |
|
||||
| release/v4.3 | v4.3.3 | v4.3.3 |
|
||||
| release/v4.4 | v4.4.6 | v4.4.1 |
|
||||
| release/v5.0 | v5.0.4 | v5.0 |
|
||||
| release/v5.1 | v5.1.2 | v5.1 |
|
||||
| release/v5.2 and above | v5.2 | v5.2 |
|
||||
|
||||
### ESP32-C3
|
||||
|
||||
#### v0.2 - v0.4
|
||||
#### v0.2, v0.3
|
||||
|
||||
Supported since ESP-IDF v4.3.
|
||||
|
||||
#### v1.1
|
||||
#### v0.4
|
||||
|
||||
| Release branch | Recommended | Required |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v4.2 | EOL | EOL |
|
||||
| release/v4.3 | v4.3.7+ | v4.3.7 |
|
||||
| release/v4.4 | v4.4.7+ | v4.4.7 |
|
||||
| release/v5.0 | v5.0.5+ | v5.0.5 |
|
||||
| release/v5.1 | v5.1.3+ | v5.1.3 |
|
||||
| release/v5.2 and above | v5.2+ | v5.2 |
|
||||
To be added.
|
||||
|
||||
### ESP32-S3
|
||||
|
||||
#### v0.1, v0.2
|
||||
#### v0.1
|
||||
|
||||
Supported since ESP-IDF v4.4.
|
||||
|
||||
#### v0.2
|
||||
|
||||
To be added.
|
||||
|
||||
### ESP32-C2 & ESP8684
|
||||
|
||||
#### v1.0, v1.1
|
||||
#### v1.0
|
||||
|
||||
Supported since ESP-IDF v5.0.
|
||||
|
||||
#### v1.2
|
||||
#### v1.1
|
||||
|
||||
| Release branch | Recommended | Required |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v5.0 | v5.0.7+ | v5.0 |
|
||||
| release/v5.1 | v5.1.4+ | v5.1 |
|
||||
| release/v5.2 | v5.2.2+ | v5.2 |
|
||||
| release/v5.3 and above | v5.3+ | v5.3 |
|
||||
|
||||
#### v2.0
|
||||
|
||||
| Release branch | Recommended | Required |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v5.0 | v5.0.8+ | v5.0.8 |
|
||||
| release/v5.1 | v5.1.5+ | v5.1.5* |
|
||||
| release/v5.2 | v5.2.4+ | v5.2.4 |
|
||||
| release/v5.3 | v5.3.2+ | v5.3.2* |
|
||||
| release/v5.4 and above | v5.4+ | v5.4 |
|
||||
|
||||
Note: IDF v5.1.5 and v5.3.2 are compatible with C2 v2.0. However the chip revision check hasn't been updated on these releases. Enable `ESP32C2_REV2_DEVELOPMENT` config to bypass the outdated check.
|
||||
|
||||
### ESP32-C6
|
||||
|
||||
#### v0.0, v0.1
|
||||
|
||||
Supported since ESP-IDF v5.1.
|
||||
|
||||
#### v0.2
|
||||
|
||||
| Release branch | Recommended | Required |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v5.1 | v5.1.5+ | v5.1 |
|
||||
| release/v5.2 | v5.2.4+ | v5.2 |
|
||||
| release/v5.3 | v5.3.2+ | v5.3 |
|
||||
| release/v5.4 and above | v5.4+ | v5.4 |
|
||||
|
||||
### ESP32-H2
|
||||
|
||||
#### v0.1, v0.2
|
||||
|
||||
Supported since ESP-IDF v5.1.
|
||||
To be added.
|
||||
|
||||
#### v1.2
|
||||
|
||||
| Release branch | Recommended | Required |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v5.1 | v5.1.6+ | v5.1.6 |
|
||||
| release/v5.2 | v5.2.5+ | v5.2.5 |
|
||||
| release/v5.3 | v5.3.3+ | v5.3.3 |
|
||||
| release/v5.4 | v5.4.1+ | v5.4.1 |
|
||||
| release/v5.5 and above | v5.5+ | v5.5 |
|
||||
To be added.
|
||||
|
||||
### ESP32-P4
|
||||
|
||||
#### v1.0, v1.3
|
||||
|
||||
Supported since ESP-IDF v5.3.
|
||||
|
||||
## What If the ESP-IDF Version Is Lower than the `Required` Version?
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
|
||||
有关乐鑫芯片版本的编码方式,请参考 [关于芯片版本 (Chip Revision) 编码方式的兼容性公告](https://www.espressif.com/sites/default/files/advisory_downloads/AR2022-005%20%E5%85%B3%E4%BA%8E%E8%8A%AF%E7%89%87%E7%89%88%E6%9C%AC%E7%BC%96%E7%A0%81%E6%96%B9%E5%BC%8F%20%28Chip%20Revision%29%20%E7%9A%84%E5%85%BC%E5%AE%B9%E6%80%A7%E5%85%AC%E5%91%8A.pdf)。
|
||||
|
||||
运行 `esptool chip-id` 可查看芯片系列及其版本。有关区分芯片版本及版本改进内容的更多信息,请参考 [芯片勘误表](https://www.espressif.com.cn/zh-hans/support/documents/technical-documents?keys=%E5%8B%98%E8%AF%AF%E8%A1%A8)。运行 `idf.py --version` 可查看当前的 ESP-IDF 版本。
|
||||
运行 `esptool chip_id` 可查看芯片系列及其版本。有关区分芯片版本及版本改进内容的更多信息,请参考 [芯片勘误表](https://www.espressif.com.cn/zh-hans/support/documents/technical-documents?keys=%E5%8B%98%E8%AF%AF%E8%A1%A8)。运行 `idf.py --version` 可查看当前的 ESP-IDF 版本。
|
||||
|
||||
## ESP-IDF 对各芯片版本的支持
|
||||
|
||||
@@ -50,101 +50,49 @@
|
||||
|
||||
#### v1.0
|
||||
|
||||
| 发布分支 | 推荐版本 | 需求版本 |
|
||||
| 发布分支 | 推荐版本 | 需求版本 |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v4.2 | v4.2.3+ | v4.2 |
|
||||
| release/v4.3 | v4.3.3+ | v4.3 |
|
||||
| release/v4.4 | v4.4.6+ | v4.4 |
|
||||
| release/v5.0 | v5.0.4+ | v5.0 |
|
||||
| release/v5.1 | v5.1.2+ | v5.1 |
|
||||
| release/v5.2 及以上 | v5.2+ | v5.2 |
|
||||
| release/v4.2 | v4.2.3 | v4.2.3 |
|
||||
| release/v4.3 | v4.3.3 | v4.3.3 |
|
||||
| release/v4.4 | v4.4.6 | v4.4.1 |
|
||||
| release/v5.0 | v5.0.4 | v5.0 |
|
||||
| release/v5.1 | v5.1.2 | v5.1 |
|
||||
| release/v5.2 及以上 | v5.2 | v5.2 |
|
||||
|
||||
### ESP32-C3
|
||||
|
||||
#### v0.2 - v0.4
|
||||
#### v0.2 和 v0.3
|
||||
|
||||
从 ESP-IDF v4.3 开始支持。
|
||||
|
||||
#### v1.1
|
||||
#### v0.4
|
||||
|
||||
| 发布分支 | 推荐版本 | 需求版本 |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v4.2 | EOL | EOL |
|
||||
| release/v4.3 | v4.3.7+ | v4.3.7 |
|
||||
| release/v4.4 | v4.4.7+ | v4.4.7 |
|
||||
| release/v5.0 | v5.0.5+ | v5.0.5 |
|
||||
| release/v5.1 | v5.1.3+ | v5.1.3 |
|
||||
| release/v5.2 及以上 | v5.2+ | v5.2 |
|
||||
待更新。
|
||||
|
||||
### ESP32-S3
|
||||
|
||||
#### v0.1, v0.2
|
||||
#### v0.1
|
||||
|
||||
从 ESP-IDF v4.4 开始支持。
|
||||
|
||||
#### v0.2
|
||||
|
||||
待更新。
|
||||
|
||||
### ESP32-C2 & ESP8684
|
||||
|
||||
#### v1.0, v1.1
|
||||
#### v1.0
|
||||
|
||||
从 ESP-IDF v5.0 开始支持。
|
||||
|
||||
#### v1.2
|
||||
#### v1.1
|
||||
|
||||
| 发布分支 | 推荐版本 | 需求版本 |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v5.0 | v5.0.7+ | v5.0 |
|
||||
| release/v5.1 | v5.1.4+ | v5.1 |
|
||||
| release/v5.2 | v5.2.2+ | v5.1 |
|
||||
| release/v5.3 及以上 | v5.3+ | v5.3 |
|
||||
|
||||
#### v2.0
|
||||
|
||||
| 发布分支 | 推荐版本 | 需求版本 |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v5.0 | v5.0.8+ | v5.0.8 |
|
||||
| release/v5.1 | v5.1.5+ | v5.1.5* |
|
||||
| release/v5.2 | v5.2.4+ | v5.2.4 |
|
||||
| release/v5.3 | v5.3.2+ | v5.3.2* |
|
||||
| release/v5.4 及以上 | v5.4+ | v5.4 |
|
||||
|
||||
提示: IDF v5.1.5 及 v5.3.2 与 C2 v2.0 兼容,但芯片版本检查尚未在这些发布版本更新。使能 `ESP32C2_REV2_DEVELOPMENT` 选项来跳过这些过时的检查。
|
||||
|
||||
### ESP32-C6
|
||||
|
||||
#### v0.0, v0.1
|
||||
|
||||
从 ESP-IDF v5.1 开始支持。
|
||||
|
||||
#### v0.2
|
||||
|
||||
| 发布分支 | 推荐版本 | 需求版本 |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v5.1 | v5.1.5+ | v5.1 |
|
||||
| release/v5.2 | v5.2.4+ | v5.2 |
|
||||
| release/v5.3 | v5.3.2+ | v5.3 |
|
||||
| release/v5.4 及以上 | v5.4+ | v5.4 |
|
||||
|
||||
### ESP32-H2
|
||||
|
||||
#### v0.1, v0.2
|
||||
|
||||
从 ESP-IDF v5.1 开始支持。
|
||||
待更新。
|
||||
|
||||
#### v1.2
|
||||
|
||||
| 发布分支 | 推荐版本 | 需求版本 |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v5.1 | v5.1.6+ | v5.1.6 |
|
||||
| release/v5.2 | v5.2.5+ | v5.2.5 |
|
||||
| release/v5.3 | v5.3.3+ | v5.3.3 |
|
||||
| release/v5.4 | v5.4.1+ | v5.4.1 |
|
||||
| release/v5.5 及以上 | v5.5+ | v5.5 |
|
||||
待更新。
|
||||
|
||||
### ESP32-P4
|
||||
|
||||
#### v1.0, v1.3
|
||||
|
||||
从 ESP-IDF v5.3 开始支持。
|
||||
|
||||
## 如果 ESP-IDF 版本低于 `需求版本` 会出现什么情况?
|
||||
|
||||
|
||||
232
Kconfig
232
Kconfig
@@ -22,6 +22,7 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
|
||||
config IDF_ENV_BRINGUP
|
||||
bool
|
||||
default "y" if IDF_TARGET_ESP32P4
|
||||
help
|
||||
- This option is ONLY used when doing new chip bringup.
|
||||
- This option will only enable necessary hw / sw settings for running
|
||||
@@ -48,10 +49,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
bool
|
||||
default "y" if IDF_TOOLCHAIN="clang"
|
||||
|
||||
config IDF_TOOLCHAIN_GCC
|
||||
bool
|
||||
default "y" if IDF_TOOLCHAIN="gcc"
|
||||
|
||||
config IDF_TARGET_ARCH_RISCV
|
||||
bool
|
||||
default "n"
|
||||
@@ -79,6 +76,10 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
string
|
||||
default "$IDF_INIT_VERSION"
|
||||
|
||||
config IDF_TARGET_LINUX
|
||||
bool
|
||||
default "y" if IDF_TARGET="linux"
|
||||
|
||||
config IDF_TARGET_ESP32
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32"
|
||||
@@ -113,12 +114,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
select FREERTOS_UNICORE
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
|
||||
config IDF_TARGET_ESP32C5
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32c5"
|
||||
select FREERTOS_UNICORE
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
|
||||
config IDF_TARGET_ESP32P4
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32p4"
|
||||
@@ -130,34 +125,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
select FREERTOS_UNICORE
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
|
||||
config IDF_TARGET_ESP32C61
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32c61"
|
||||
select FREERTOS_UNICORE
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
|
||||
config IDF_TARGET_ESP32H21
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32h21"
|
||||
select FREERTOS_UNICORE
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
select IDF_ENV_BRINGUP
|
||||
select IDF_ENV_FPGA if ESP32H21_SELECTS_REV_MP
|
||||
|
||||
config IDF_TARGET_ESP32H4
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32h4"
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
select IDF_ENV_BRINGUP
|
||||
select IDF_ENV_FPGA if ESP32H4_SELECTS_REV_MP
|
||||
|
||||
config IDF_TARGET_ESP32S31
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32s31"
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
select IDF_ENV_FPGA
|
||||
select IDF_ENV_BRINGUP
|
||||
|
||||
config IDF_TARGET_LINUX
|
||||
bool
|
||||
default "y" if IDF_TARGET="linux"
|
||||
@@ -172,11 +139,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
default 0x000D if IDF_TARGET_ESP32C6
|
||||
default 0x0010 if IDF_TARGET_ESP32H2
|
||||
default 0x0012 if IDF_TARGET_ESP32P4
|
||||
default 0x0017 if IDF_TARGET_ESP32C5
|
||||
default 0x0014 if IDF_TARGET_ESP32C61
|
||||
default 0x0019 if IDF_TARGET_ESP32H21
|
||||
default 0x001C if IDF_TARGET_ESP32H4
|
||||
default 0x0020 if IDF_TARGET_ESP32S31
|
||||
default 0xFFFF
|
||||
|
||||
|
||||
@@ -227,10 +189,10 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
|
||||
When loading the BIN with UART, the ROM will jump to ram and run the app after finishing the ROM
|
||||
startup code, so there's no additional startup initialization required. You can use the
|
||||
`load-ram` in esptool to load the generated .bin file into ram and execute.
|
||||
`load_ram` in esptool.py to load the generated .bin file into ram and execute.
|
||||
|
||||
Example:
|
||||
esptool --chip {chip} -p {port} -b {baud} --no-stub load-ram {app.bin}
|
||||
esptool.py --chip {chip} -p {port} -b {baud} --no-stub load_ram {app.bin}
|
||||
|
||||
Recommended sdkconfig.defaults for building loadable ELF files is as follows.
|
||||
CONFIG_APP_BUILD_TYPE_RAM is required, other options help reduce application
|
||||
@@ -238,7 +200,7 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
|
||||
CONFIG_APP_BUILD_TYPE_RAM=y
|
||||
CONFIG_VFS_SUPPORT_TERMIOS=
|
||||
CONFIG_LIBC_NEWLIB_NANO_FORMAT=y
|
||||
CONFIG_NEWLIB_NANO_FORMAT=y
|
||||
CONFIG_ESP_SYSTEM_PANIC_PRINT_HALT=y
|
||||
CONFIG_ESP_DEBUG_STUBS_ENABLE=
|
||||
CONFIG_ESP_ERR_TO_NAME_LOOKUP=
|
||||
@@ -348,8 +310,8 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
help
|
||||
This option sets compiler optimization level (gcc -O argument) for the app.
|
||||
|
||||
- The "Debug" setting will add the -Og flag to CFLAGS.
|
||||
- The "Size" setting will add the -Os flag to CFLAGS (-Oz with Clang).
|
||||
- The "Debug" setting will add the -0g flag to CFLAGS.
|
||||
- The "Size" setting will add the -0s flag to CFLAGS.
|
||||
- The "Performance" setting will add the -O2 flag to CFLAGS.
|
||||
- The "None" setting will add the -O0 flag to CFLAGS.
|
||||
|
||||
@@ -370,7 +332,7 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
config COMPILER_OPTIMIZATION_DEBUG
|
||||
bool "Debug (-Og)"
|
||||
config COMPILER_OPTIMIZATION_SIZE
|
||||
bool "Optimize for size (-Os with GCC, -Oz with Clang)"
|
||||
bool "Optimize for size (-Os)"
|
||||
config COMPILER_OPTIMIZATION_PERF
|
||||
bool "Optimize for performance (-O2)"
|
||||
config COMPILER_OPTIMIZATION_NONE
|
||||
@@ -378,27 +340,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
|
||||
endchoice
|
||||
|
||||
config COMPILER_ENABLE_RISCV_ZCMP
|
||||
bool "Enable RISCV ZCMP extension"
|
||||
depends on SOC_CPU_ZCMP_WORKAROUND
|
||||
default n
|
||||
help
|
||||
Enable the RISC-V ZCMP (Compressed Macro) extension to reduce binary size
|
||||
by optimizing function prologue and epilogue sequences.
|
||||
|
||||
Note: Due to a hardware issue on some ESP32 chips (e.g., ESP32C5, ESP32C61,
|
||||
ESP32H4), executing "cm.push" may re-enable interrupts even when global
|
||||
interrupts are disabled (mstatus.mie = 0). This can cause unexpected interrupts
|
||||
during CPU retention or within critical sections.
|
||||
|
||||
Workarounds are implemented in the IDF codebase. However, if user code
|
||||
directly disables interrupts, additional actions may be required. Refer
|
||||
to code examples under the SOC_CPU_ZCMP_WORKAROUND macro, or disable
|
||||
the ZCMP extension for source files that contain functions which may
|
||||
execute while mstatus.mie = 0.
|
||||
|
||||
Even with these workarounds, the issue may still affect dual-core variants.
|
||||
|
||||
choice COMPILER_OPTIMIZATION_ASSERTION_LEVEL
|
||||
prompt "Assertion level"
|
||||
default COMPILER_OPTIMIZATION_ASSERTIONS_ENABLE
|
||||
@@ -434,18 +375,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
|
||||
endchoice # assertions
|
||||
|
||||
config COMPILER_ASSERT_NDEBUG_EVALUATE
|
||||
bool "Enable the evaluation of the expression inside assert(X) when NDEBUG is set"
|
||||
default n
|
||||
help
|
||||
When NDEBUG is set, assert(X) will not cause code to trigger an assertion.
|
||||
With this option set, assert(X) will still evaluate the expression X, though
|
||||
the result will never cause an assertion. This means that if X is a function
|
||||
then the function will be called.
|
||||
|
||||
This is not according to the standard, which states that the assert(X) should
|
||||
be replaced with ((void)0) if NDEBUG is defined.
|
||||
|
||||
choice COMPILER_FLOAT_LIB_FROM
|
||||
prompt "Compiler float lib source"
|
||||
default COMPILER_FLOAT_LIB_FROM_RVFPLIB if ESP_ROM_HAS_RVFPLIB
|
||||
@@ -567,29 +496,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
help
|
||||
Stack smashing protection.
|
||||
|
||||
config COMPILER_NO_MERGE_CONSTANTS
|
||||
bool "Disable merging const sections"
|
||||
depends on IDF_TOOLCHAIN_GCC
|
||||
help
|
||||
Disable merging identical constants (string/floating-point) across compilation units.
|
||||
This helps in better size analysis of the application binary as the rodata section
|
||||
distribution is more uniform across libraries. On downside, it may increase
|
||||
the binary size and hence should be used during development phase only.
|
||||
|
||||
config COMPILER_ENABLE_TEXT_SECTION_LITERALS
|
||||
bool
|
||||
depends on IDF_TOOLCHAIN_GCC
|
||||
depends on IDF_TARGET_ARCH_XTENSA
|
||||
default y if ESPTOOLPY_FAST_REFLASHING
|
||||
help
|
||||
Intersperse Xtensa literals within the text section to keep
|
||||
them as close as possible to their references. This prevents
|
||||
literals from being placed into a separate section in the
|
||||
output file and prevents the linker from combining literal
|
||||
pools from different object files. Enabling this is necessary
|
||||
for fast reflashing to prevent mixing code from mutable and
|
||||
immutable libraries.
|
||||
|
||||
config COMPILER_WARN_WRITE_STRINGS
|
||||
bool "Enable -Wwrite-strings warning flag"
|
||||
default "n"
|
||||
@@ -616,20 +522,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
|
||||
This option can be enabled for RISC-V targets only.
|
||||
|
||||
config COMPILER_DISABLE_DEFAULT_ERRORS
|
||||
bool "Disable errors for default warnings"
|
||||
default "n"
|
||||
help
|
||||
Enable this option if you do not want default warnings to be considered as errors,
|
||||
especially when updating IDF.
|
||||
|
||||
This is a temporary flag that could help to allow upgrade while having
|
||||
some time to address the warnings raised by those default warnings.
|
||||
Alternatives are:
|
||||
1) fix code (preferred),
|
||||
2) remove specific warnings,
|
||||
3) do not consider specific warnings as error.
|
||||
|
||||
config COMPILER_DISABLE_GCC12_WARNINGS
|
||||
bool "Disable new warnings introduced in GCC 12"
|
||||
default "n"
|
||||
@@ -644,20 +536,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
Enable this option if use GCC 13 or newer, and want to disable warnings which don't appear with
|
||||
GCC 12.
|
||||
|
||||
config COMPILER_DISABLE_GCC14_WARNINGS
|
||||
bool "Disable new warnings introduced in GCC 14"
|
||||
default "n"
|
||||
help
|
||||
Enable this option if use GCC 14 or newer, and want to disable warnings which don't appear with
|
||||
GCC 13.
|
||||
|
||||
config COMPILER_DISABLE_GCC15_WARNINGS
|
||||
bool "Disable new warnings introduced in GCC 15"
|
||||
default "n"
|
||||
help
|
||||
Enable this option if use GCC 15 or newer, and want to disable warnings which don't appear with
|
||||
GCC 14.
|
||||
|
||||
config COMPILER_DUMP_RTL_FILES
|
||||
bool "Dump RTL files during compilation"
|
||||
help
|
||||
@@ -692,89 +570,10 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
default "gcc" if COMPILER_RT_LIB_GCCLIB
|
||||
default "" if COMPILER_RT_LIB_HOST
|
||||
|
||||
choice COMPILER_ORPHAN_SECTIONS
|
||||
prompt "Orphan sections handling"
|
||||
default COMPILER_ORPHAN_SECTIONS_ERROR
|
||||
depends on !IDF_TARGET_LINUX
|
||||
help
|
||||
If the linker finds orphan sections, it attempts to place orphan sections after sections of the same
|
||||
attribute such as code vs data, loadable vs non-loadable, etc.
|
||||
That means that orphan sections could placed between sections defined in IDF linker scripts.
|
||||
This could lead to corruption of the binary image. Configure the linker action here.
|
||||
|
||||
config COMPILER_ORPHAN_SECTIONS_ERROR
|
||||
bool "Fail if orphan sections found"
|
||||
help
|
||||
Fails the link step with an error if orphan sections are detected.
|
||||
|
||||
config COMPILER_ORPHAN_SECTIONS_WARNING
|
||||
bool "Place with warning"
|
||||
help
|
||||
Places orphan sections with a warning message.
|
||||
|
||||
config COMPILER_ORPHAN_SECTIONS_PLACE
|
||||
bool "Place silently"
|
||||
help
|
||||
Places orphan sections without a warning/error message.
|
||||
endchoice
|
||||
|
||||
config COMPILER_STATIC_ANALYZER
|
||||
bool "Enable compiler static analyzer"
|
||||
default "n"
|
||||
depends on IDF_TOOLCHAIN_GCC
|
||||
help
|
||||
Enable compiler static analyzer. This may produce false-positive results and increases compile time.
|
||||
|
||||
choice COMPILER_CXX_GLIBCXX_CONSTEXPR
|
||||
prompt "Define _GLIBCXX_CONSTEXPR"
|
||||
default COMPILER_CXX_GLIBCXX_CONSTEXPR_NO_CHANGE
|
||||
depends on IDF_TOOLCHAIN_GCC && !IDF_TARGET_LINUX
|
||||
help
|
||||
Modify libstdc++ _GLIBCXX20_CONSTEXPR and _GLIBCXX23_CONSTEXPR definitions to provide size
|
||||
optimizations. The total size optimization depends on the application's structure.
|
||||
There is no robust way to determine which option would be better in a particular case.
|
||||
Please try all available options to find the best size optimization.
|
||||
|
||||
config COMPILER_CXX_GLIBCXX_CONSTEXPR_NO_CHANGE
|
||||
bool "No change"
|
||||
help
|
||||
Use default _GLIBCXX20_CONSTEXPR and _GLIBCXX23_CONSTEXPR defined in libstdc++
|
||||
|
||||
config COMPILER_CXX_GLIBCXX_CONSTEXPR_COLD_CONSTEXPR
|
||||
bool "_GLIBCXX2X_CONSTEXPR=__attribute__((cold)) constexpr"
|
||||
help
|
||||
Define _GLIBCXX20_CONSTEXPR=__attribute__((cold)) constexpr
|
||||
Define _GLIBCXX23_CONSTEXPR=__attribute__((cold)) constexpr
|
||||
|
||||
config COMPILER_CXX_GLIBCXX_CONSTEXPR_COLD
|
||||
bool "_GLIBCXX2X_CONSTEXPR=__attribute__((cold))"
|
||||
help
|
||||
Define _GLIBCXX20_CONSTEXPR=__attribute__((cold)).
|
||||
Define _GLIBCXX23_CONSTEXPR=__attribute__((cold)).
|
||||
endchoice
|
||||
|
||||
endmenu # Compiler Options
|
||||
|
||||
menu "Component config"
|
||||
comment "!!! MINIMAL_BUILD is enabled !!!"
|
||||
depends on "${IDF_MINIMAL_BUILD}"
|
||||
comment "Only common components and those transitively required by the main component are listed"
|
||||
depends on "${IDF_MINIMAL_BUILD}"
|
||||
comment "If a component configuration is missing, please add it to the main component's requirements"
|
||||
depends on "${IDF_MINIMAL_BUILD}"
|
||||
|
||||
source "$COMPONENT_KCONFIGS_SOURCE_FILE"
|
||||
|
||||
menu "Configuration for components not included in the build"
|
||||
depends on "${IDF_BUILD_V2}"
|
||||
osource "$COMPONENT_KCONFIGS_EXCLUDED_SOURCE_FILE"
|
||||
endmenu
|
||||
|
||||
endmenu
|
||||
|
||||
menu "Project configuration for components not included in the build"
|
||||
depends on "${IDF_BUILD_V2}"
|
||||
osource "$COMPONENT_KCONFIGS_PROJBUILD_EXCLUDED_SOURCE_FILE"
|
||||
endmenu
|
||||
|
||||
config IDF_EXPERIMENTAL_FEATURES
|
||||
@@ -791,11 +590,4 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
- CONFIG_ESPTOOLPY_FLASHFREQ_120M && CONFIG_ESPTOOLPY_FLASH_SAMPLE_MODE_DTR
|
||||
- CONFIG_SPIRAM_SPEED_120M && CONFIG_SPIRAM_MODE_OCT
|
||||
- CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
|
||||
- CONFIG_ESP_WIFI_EAP_TLS1_3
|
||||
- CONFIG_ESP_WIFI_ENABLE_ROAMING_APP
|
||||
- CONFIG_USB_HOST_EXT_PORT_RESET_ATTEMPTS
|
||||
- CONFIG_LIBC_PICOLIBC
|
||||
- CONFIG_GDMA_ENABLE_WEIGHTED_ARBITRATION
|
||||
- CONFIG_I3C_MASTER_ENABLED
|
||||
- CONFIG_MBEDTLS_ESP_IDF_USE_PSA_CRYPTO
|
||||
- CONFIG_ESPTOOLPY_FAST_REFLASHING
|
||||
- CONFIG_MBEDTLS_USE_CRYPTO_ROM_IMPL
|
||||
|
||||
25
README.md
25
README.md
@@ -15,19 +15,16 @@ ESP-IDF is the development framework for Espressif SoCs supported on Windows, Li
|
||||
|
||||
The following table shows ESP-IDF support of Espressif SoCs where ![alt text][preview] and ![alt text][supported] denote preview status and support, respectively. The preview support is usually limited in time and intended for beta versions of chips. Please use an ESP-IDF release where the desired SoC is already supported.
|
||||
|
||||
|Chip | v5.1 | v5.2 | v5.3 | v5.4 | v5.5 | v6.0 | |
|
||||
|:----------- |:---------------------: |:---------------------: |:---------------------: | :---------------------: | :-------------------: | :--------------------: |:------------------------------------------------------------------- |
|
||||
|ESP32 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S2 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-C3 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S3 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |[Announcement](https://www.espressif.com/en/news/ESP32_S3) |
|
||||
|ESP32-C2 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |[Announcement](https://www.espressif.com/en/news/ESP32-C2) |
|
||||
|ESP32-C6 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |[Announcement](https://www.espressif.com/en/news/ESP32_C6) |
|
||||
|ESP32-H2 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |[Announcement](https://www.espressif.com/en/news/ESP32_H2) |
|
||||
|ESP32-P4 | | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |[Announcement](https://www.espressif.com/en/news/ESP32-P4) |
|
||||
|ESP32-C5 | | | | | ![alt text][supported] | ![alt text][supported] |[Announcement](https://www.espressif.com/en/news/ESP32-C5) |
|
||||
|ESP32-C61 | | | | | ![alt text][supported] | ![alt text][supported] |[Announcement](https://www.espressif.com/en/products/socs/esp32-c61) |
|
||||
|ESP32-H4 | | | | | | ![alt text][preview] |[Announcement](https://www.espressif.com/en/news/ESP32-H4) |
|
||||
|Chip | v4.3 | v4.4 | v5.0 | v5.1 | v5.2 | |
|
||||
|:----------- | :---------------------:| :---------------------:| :---------------------:| :--------------------: | :--------------------: | :----------------------------------------------------------|
|
||||
|ESP32 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S2 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-C3 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S3 | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_S3) |
|
||||
|ESP32-C2 | | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32-C2) |
|
||||
|ESP32-C6 | | | | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_C6) |
|
||||
|ESP32-H2 | | | | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_H2) |
|
||||
|ESP32-P4 | | | | | ![alt text][preview] | [Announcement](https://www.espressif.com/en/news/ESP32-P4) |
|
||||
|
||||
[supported]: https://img.shields.io/badge/-supported-green "supported"
|
||||
[preview]: https://img.shields.io/badge/-preview-orange "preview"
|
||||
@@ -83,7 +80,7 @@ See the Getting Started guide links above for a detailed setup guide. This is a
|
||||
|
||||
## Flashing the Project
|
||||
|
||||
When the build finishes, it will print a command line to use `esptool` to flash the chip. However you can also do this automatically by running:
|
||||
When the build finishes, it will print a command line to use esptool.py to flash the chip. However you can also do this automatically by running:
|
||||
|
||||
`idf.py -p PORT flash`
|
||||
|
||||
|
||||
27
README_CN.md
27
README_CN.md
@@ -15,19 +15,16 @@ ESP-IDF 是乐鑫官方推出的物联网开发框架,支持 Windows、Linux
|
||||
|
||||
下表总结了乐鑫芯片在 ESP-IDF 各版本中的支持状态,其中 ![alt text][supported] 代表已支持,![alt text][preview] 代表目前处于预览支持状态。预览支持状态通常有时间限制,而且仅适用于测试版芯片。请确保使用与芯片相匹配的 ESP-IDF 版本。
|
||||
|
||||
|芯片 | v5.1 | v5.2 | v5.3 | v5.4 | v5.5 | v6.0 | |
|
||||
|:----------- | :-------------------: | :--------------------: | :--------------------: | :--------------------: | :-------------------: | :-------------------: |:------------------------------------------------------------------------- |
|
||||
|ESP32 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |![alt text][supported] | |
|
||||
|ESP32-S2 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |![alt text][supported] | |
|
||||
|ESP32-C3 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |![alt text][supported] | |
|
||||
|ESP32-S3 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_S3) |
|
||||
|ESP32-C2 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32-C2) |
|
||||
|ESP32-C6 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_C6) |
|
||||
|ESP32-H2 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_H2) |
|
||||
|ESP32-P4 | | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] |![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32-P4) |
|
||||
|ESP32-C5 | | | | | ![alt text][supported] |![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32-C5) |
|
||||
|ESP32-C61 | | | | | ![alt text][supported] |![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/products/socs/esp32-c61) |
|
||||
|ESP32-H4 | | | | | |![alt text][preview] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32-H4) |
|
||||
|芯片 | v4.3 | v4.4 | v5.0 | v5.1 | v5.2 | |
|
||||
|:----------- | :---------------------:| :---------------------:| :---------------------:| :--------------------: | :--------------------: | :-------------------------------------------------------------- |
|
||||
|ESP32 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S2 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-C3 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S3 | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_S3) |
|
||||
|ESP32-C2 | | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32-C2) |
|
||||
|ESP32-C6 | | | | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_C6) |
|
||||
|ESP32-H2 | | | | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_H2) |
|
||||
|ESP32-P4 | | | | | ![alt text][preview] | [芯片发布公告](https://www.espressif.com/en/news/ESP32-P4) |
|
||||
|
||||
[supported]: https://img.shields.io/badge/-%E6%94%AF%E6%8C%81-green "supported"
|
||||
[preview]: https://img.shields.io/badge/-%E9%A2%84%E8%A7%88-orange "preview"
|
||||
@@ -83,7 +80,7 @@ ESP-IDF 中的子模块采用相对路径([详见 .gitmodules 文件](.gitmodu
|
||||
|
||||
## 烧写项目
|
||||
|
||||
当构建结束,终端会打印出一条命令行,告知如何使用 `esptool` 工具烧写项目到芯片中。但你也可以运行下面这条命令来自动烧写:
|
||||
当构建结束,终端会打印出一条命令行,告知如何使用 esptool.py 工具烧写项目到芯片中。但你也可以运行下面这条命令来自动烧写:
|
||||
|
||||
`idf.py -p PORT flash`
|
||||
|
||||
@@ -124,7 +121,7 @@ ESP-IDF 中的子模块采用相对路径([详见 .gitmodules 文件](.gitmodu
|
||||
|
||||
* 最新版的文档:https://docs.espressif.com/projects/esp-idf/ ,该文档是由本仓库 [docs 目录](docs) 构建得到。
|
||||
|
||||
* [初学者指南:主要概念和资源](https://www.bilibili.com/video/BV1114y1r7du/)
|
||||
* [初学者指南:主要概念和资源](https://www.bilibili.com/video/BV1114y1r7du/)
|
||||
|
||||
* 可以前往 [esp32.com 论坛](https://esp32.com/) 提问,挖掘社区资源。
|
||||
|
||||
|
||||
130
ROADMAP.md
130
ROADMAP.md
@@ -1,130 +0,0 @@
|
||||
# ESP-IDF Project Roadmap 2025
|
||||
|
||||
* [中文版](./ROADMAP_CN.md)
|
||||
|
||||
This document outlines the goals of ESP-IDF project and is shared for the convenience of our customers. It is important to clarify that this document is not a binding commitment to our customers. Instead, its primary purpose is to offer a clear roadmap and direction for the project's development. By openly sharing this information, we aim to enhance our customers' understanding, promote transparency and ensure alignment with the overarching objectives of the ESP-IDF project.
|
||||
|
||||
## Project Overview
|
||||
|
||||
### Project Goals
|
||||
|
||||
In both minor and major releases, we integrate new chip support to enhance our product range. By expanding the chip matrix, we broaden the scope of our offerings, catering to a wider audience with diverse needs. This proactive approach ensures that our products remain at the forefront of technological advancements, consistently meeting and exceeding customer expectations.
|
||||
|
||||
Furthermore, we prioritize bugfix releases for active branches, focusing on improving the stability and performance of products already in production. By addressing bugs promptly, we aim to enhance the overall user experience and provide tangible benefits to customers relying on our solutions. This proactive maintenance strategy reflects our commitment to delivering reliable, high-quality products to our valued customer base.
|
||||
|
||||
Below are the main objectives that ESP-IDF project/teams would like to implement in 2025.
|
||||
|
||||
- New Chip Support
|
||||
|
||||
- Add support for ESP32-C5
|
||||
- Add support for ESP32-C61
|
||||
|
||||
- More Minor/Major Releases
|
||||
|
||||
- Release IDF v5.5 in the middle of 2025
|
||||
- Release IDF v6.0 at the end of 2025
|
||||
|
||||
- More Bugfix Releases
|
||||
|
||||
- Release v5.0.8 and v5.0.9 before ESP-IDF v5.0 goes End of Life in May 2025
|
||||
- Release v5.1.6 and v5.1.7 before ESP-IDF v5.1 goes End of Life in December 2025
|
||||
- Do more bugfix releases for IDF v5.2 and IDF v5.3 before release/5.2 and release/5.3 enter maintenance period
|
||||
- Do more bug fixes releases for release/5.4 and release/5.5, and push the two releases to be more stable and production-ready
|
||||
|
||||
- Major Changes
|
||||
|
||||
- We plan to upgrade MbedTLS to v4.x series in IDF v6.0. In addition, we will also be migrating to newer PSA crypto API as part of this upgrade. Please note that this may involve some breaking changes on the application side for the crypto API usage.
|
||||
|
||||
Please note that support status of previous silicones could be found on [ESP-IDF Release and SoC Compatibility](https://github.com/espressif/esp-idf#esp-idf-release-and-soc-compatibility).
|
||||
|
||||
### Roadmap Details
|
||||
|
||||
The ESP-IDF project prioritizes consistent maintenance and updates to ensure our customers remain at the forefront of technological advancements. Our commitment to ongoing development ensures that customers continuously benefit from the latest innovations in the field.
|
||||
|
||||
Moreover, we are dedicated to empowering our customers to leverage newly implemented features and enhanced functionalities through iterative improvements. Our steadfast commitment to pushing boundaries ensures that clients not only keep pace with evolving technology but also extract optimal value from the cutting-edge capabilities of our products.
|
||||
|
||||
Below are the main roadmap details for functional areas inside ESP-IDF.
|
||||
|
||||
- New Chip Support
|
||||
|
||||
- Add the initial support for the mass production version of ESP32-C5 in ESP-IDF v5.5, refer to [ESP32-C5 Support Status](https://github.com/espressif/esp-idf/issues/14021)
|
||||
- Add the initial support for the mass production version of ESP32-C61 in ESP-IDF v5.5, refer to [ESP32-C61 Support Status](https://developer.espressif.com/pages/chip-support-status/esp32c61/#esp-idf)
|
||||
|
||||
- Bugfix releases
|
||||
|
||||
- Do bugfix releases v5.0.8 and v5.0.9 and stop maintaining ESP-IDF v5.0 in May 2025
|
||||
- Do bugfix releases v5.1.6 and v5.1.7 and stop maintaining ESP-IDF v5.1 in December 2025
|
||||
- Release bugfix IDF v5.2.4, IDF v5.2.5 and IDF v5.2.6 in 2025, and push release/5.2 to maintenance period from February 2025
|
||||
- Release bugfix IDF v5.3.3 and IDF v5.3.4 in 2025, and push release/5.3 to maintenance period from July 2025
|
||||
- Do more bug fixes releases for release/5.4 (IDF v5.4.1, IDF v5.4.2, IDF v5.4.3) and release/5.5 (IDF v5.5.1, IDF v5.5.2), and push releases to be more stable and more production-ready
|
||||
|
||||
## ESP-IDF Planning information
|
||||
|
||||
For the full list of ESP-IDF releases, please visit https://github.com/espressif/esp-idf/releases
|
||||
|
||||
All the information provided here is subject to change without notice, due to business reasons and other factors.
|
||||
|
||||
### ESP-IDF Major Releases
|
||||
|
||||
```mermaid
|
||||
timeline
|
||||
|
||||
title ESP-IDF Major Releases
|
||||
section 2025 Q1 <br> Major Release Planning
|
||||
No version planned : N/A
|
||||
section 2025 Q2 <br> Major Release Planning
|
||||
No version planned : N/A
|
||||
section 2025 Q3 <br> Major Release Planning
|
||||
No version planned : N/A
|
||||
section 2025 Q4 <br> Major Release Planning
|
||||
v6.0-beta1 : 2025/11/13
|
||||
v6.0-beta2 : 2025/12/05
|
||||
v6.0-RC1 : 2026/01/14
|
||||
v6.0-RC2 : 2026/02/06
|
||||
v6.0 : 2026/02/13
|
||||
```
|
||||
|
||||
### ESP-IDF Minor Releases
|
||||
|
||||
```mermaid
|
||||
timeline
|
||||
|
||||
title ESP-IDF Minor Releases
|
||||
section 2025 Q1 <br> Minor Release Planning
|
||||
No version planned : N/A
|
||||
section 2025 Q2 <br> Minor Release Planning
|
||||
v5.5-beta1 : 2025/05/14
|
||||
v5.5-beta2 : 2025/06/04
|
||||
section 2025 Q3 <br> Minor Release Planning
|
||||
v5.5-RC1 : 2025/07/07
|
||||
v5.5-RC2 : 2025/07/28
|
||||
v5.5 : 2025/08/04
|
||||
section 2025 Q4 <br> Minor Release Planning
|
||||
No version planned : N/A
|
||||
```
|
||||
|
||||
### ESP-IDF Bugfix Releases
|
||||
|
||||
```mermaid
|
||||
timeline
|
||||
|
||||
title ESP-IDF Bugfix Releases
|
||||
section 2025 Q1 <br> Bugfix Release Planning
|
||||
v5.0.8 : 2025/01/14
|
||||
v5.1.6 : 2025/02/18
|
||||
v5.2.4 : 2025/02/23
|
||||
v5.2.5 : 2025/02/28
|
||||
v5.4.1 : 2025/03/27
|
||||
v5.3.3 : 2025/04/04
|
||||
section 2025 Q2 <br> Bugfix Release Planning
|
||||
v5.0.9 : 2025/05/16
|
||||
v5.4.2 : 2025/06/30
|
||||
section 2025 Q3 <br> Bugfix Release Planning
|
||||
v5.3.4 : 2025/08/03
|
||||
v5.2.6 : 2025/09/04
|
||||
v5.5.1 : 2025/09/11
|
||||
v5.4.3 : 2025/10/08
|
||||
section 2025 Q4 <br> Bugfix Release Planning
|
||||
v5.5.2 : 2025/11/12
|
||||
v5.1.7 : 2026/01/06
|
||||
```
|
||||
130
ROADMAP_CN.md
130
ROADMAP_CN.md
@@ -1,130 +0,0 @@
|
||||
# ESP-IDF 项目路线图 2025
|
||||
|
||||
* [English Version](./ROADMAP.md)
|
||||
|
||||
本文档概述了 ESP-IDF 项目的年度计划,方便客户据此规划自己的项目周期。需要说明的是该文档并不是我们对客户的约束性承诺。相反,其主要目的是为客户提供 ESP-IDF 项目开发的路线图和方向。通过公开这些信息,我们希望增进客户对 ESP-IDF 项目的理解,提高透明度,并确保与 ESP-IDF 项目的总体目标保持一致。
|
||||
|
||||
## 项目总览
|
||||
|
||||
### 项目目标
|
||||
|
||||
在 ESP-IDF 的主要版本和次要版本中,我们一般会增加对新芯片的支持,以扩展我们的产品线。通过扩展芯片矩阵,拓宽我们的产品范围,并满足广泛受众的各种需求。这样便能保证我们的产品始终处于技术进步的前沿,不断满足客户的需求并超越客户的期望。
|
||||
|
||||
此外,ESP-IDF 各活跃分支的 Bugfix 版本发布也是我们项目的重中之重,着力提升已量产产品的稳定性和性能。通过及时解决问题,我们期待提升用户的整体体验,切实惠及使用乐鑫解决方案的客户。通过积极维护 ESP-IDF 的各活跃分支,我们践行了对宝贵的客户群提供可靠、高质量产品的承诺。
|
||||
|
||||
以下是 ESP-IDF 项目在 2025 年计划实现的主要目标。
|
||||
|
||||
* 新芯片支持
|
||||
|
||||
* 增加对 ESP32-C5 芯片的支持
|
||||
* 增加对 ESP32-C61 芯片的支持
|
||||
|
||||
* 发布更多的次要和主要版本
|
||||
|
||||
* 在 2025 年中发布 IDF v5.5
|
||||
* 在 2025 年底发布 IDF v6.0
|
||||
|
||||
* 发布更多 bugfix 版本
|
||||
|
||||
* 在 2025 年 5 月底 IDF v5.0 停止维护之前,发布 IDF v5.0.8 和 IDF v5.0.9
|
||||
* 在 2025 年 12 月底 IDF v5.1 停止维护之前,发布 IDF v5.1.6 和 IDF v5.1.7
|
||||
* 在 release/5.2 分支和 release/5.3 分支进入维护周期之前,发布更多 bugfix 版本
|
||||
* release/5.4 分支和 release/5.5 分支发布更多 bugfix 版本,使这两个分支更加稳定和产品化
|
||||
|
||||
* 重大变更
|
||||
|
||||
* 我们计划在 IDF v6.0 中将 MbedTLS 版本升级到 v4.x。另外,我们还会在升级中迁移到更新版的 PSA 加密 API,但请注意,这可能会导致应用程序端在使用加密 API 时出现一些非兼容性更新。
|
||||
|
||||
请注意,获取之前芯片的支持状态,请参阅 [ESP-IDF 发布和 SoC 兼容性](https://github.com/espressif/esp-idf/blob/master/README_CN.md#esp-idf-与乐鑫芯片)。
|
||||
|
||||
### 路线图细节
|
||||
|
||||
ESP-IDF 项目重视持续维护和更新,确保我们的客户始终处于技术进步的前沿。我们承诺持续进行开发,并将该领域的最新创新成果呈现给客户。
|
||||
|
||||
此外,我们也在给客户赋能,客户通过迭代改进便能接触到新开发的功能和更高的性能。我们在突破技术界限方面的坚定承诺,使客户不仅能接触到最新的技术,还能从我们产品的尖端功能中获取最大价值。
|
||||
|
||||
以下是 ESP-IDF 路线图的主要信息。
|
||||
|
||||
* 新芯片支持
|
||||
|
||||
* 在 ESP-IDF v5.4 中为 ESP32-C5 提供预览支持,并在 ESP-IDF v5.5 中为 ESP32-C5 提供完整支持,参考 [ESP32-C5 支持状态](https://github.com/espressif/esp-idf/issues/14021)
|
||||
* 在 ESP-IDF v5.4 中增加对 ESP32-C61 早期样品的预览支持,并在 IDF v5.5 中增加对 ESP32-C61 量产版本的完整支持,参考 [ESP32-C61 支持状态](https://developer.espressif.com/pages/chip-support-status/esp32c61/#esp-idf)
|
||||
|
||||
* Bugfix 版本发布
|
||||
|
||||
* 发布 Bugfix 版本 IDF v5.0.8 和 IDF v5.0.9,并在 2025 年 5 月底停止维护 ESP-IDF v5.0
|
||||
* 发布 Bugfix 版本 IDF v5.1.6 和 IDF v5.1.7,并在 2025 年 12 月底停止维护 ESP-IDF v5.1
|
||||
* 发布 Bugfix 版本 IDF v5.2.4,IDF v5.2.5 和 IDF v5.2.6,release/5.2 分支自 2025 年 2 月进入维护周期
|
||||
* 发布 Bugfix 版本 IDF v5.3.3 和 IDF v5.3.4,release/5.3 分支自 2025 年 7 月进入维护周期
|
||||
* release/5.4 分支发布更多 bugfix 版本,包括 IDF v5.4.1、IDF v5.4.2、IDF v5.4.3;release/5.5 分支发布更多 bugfix 版本,包括 IDF v5.5.1、IDF v5.5.2。通过发布这些 Bugfix 版本,使 release/5.4 分支和 release/5.5 分支更加稳定和产品化。
|
||||
|
||||
## ESP-IDF 发布计划
|
||||
|
||||
获取 ESP-IDF 的完整发布列表,请访问 https://github.com/espressif/esp-idf/releases
|
||||
|
||||
此处提供的所有信息均可因业务原因及其他因素而在没有通知的情况下进行更改。
|
||||
|
||||
### ESP-IDF 主要版本发布
|
||||
|
||||
```mermaid
|
||||
timeline
|
||||
|
||||
title ESP-IDF Major Releases
|
||||
section 2025 Q1 <br> Major Release Planning
|
||||
No version planned : N/A
|
||||
section 2025 Q2 <br> Major Release Planning
|
||||
No version planned : N/A
|
||||
section 2025 Q3 <br> Major Release Planning
|
||||
No version planned : N/A
|
||||
section 2025 Q4 <br> Major Release Planning
|
||||
v6.0-beta1 : 2025/11/13
|
||||
v6.0-beta2 : 2025/12/05
|
||||
v6.0-RC1 : 2026/01/14
|
||||
v6.0-RC2 : 2026/02/06
|
||||
v6.0 : 2026/02/13
|
||||
```
|
||||
|
||||
### ESP-IDF 次要版本发布
|
||||
|
||||
```mermaid
|
||||
timeline
|
||||
|
||||
title ESP-IDF Minor Releases
|
||||
section 2025 Q1 <br> Minor Release Planning
|
||||
No version planned : N/A
|
||||
section 2025 Q2 <br> Minor Release Planning
|
||||
v5.5-beta1 : 2025/05/14
|
||||
v5.5-beta2 : 2025/06/04
|
||||
section 2025 Q3 <br> Minor Release Planning
|
||||
v5.5-RC1 : 2025/07/07
|
||||
v5.5-RC2 : 2025/07/28
|
||||
v5.5 : 2025/08/04
|
||||
section 2025 Q4 <br> Minor Release Planning
|
||||
No version planned : N/A
|
||||
```
|
||||
|
||||
### ESP-IDF Bugfix 版本发布
|
||||
|
||||
```mermaid
|
||||
timeline
|
||||
|
||||
title ESP-IDF Bugfix Releases
|
||||
section 2025 Q1 <br> Bugfix Release Planning
|
||||
v5.0.8 : 2025/01/14
|
||||
v5.1.6 : 2025/02/18
|
||||
v5.2.4 : 2025/02/23
|
||||
v5.2.5 : 2025/02/28
|
||||
v5.4.1 : 2025/03/27
|
||||
v5.3.3 : 2025/04/04
|
||||
section 2025 Q2 <br> Bugfix Release Planning
|
||||
v5.0.9 : 2025/05/16
|
||||
v5.4.2 : 2025/06/30
|
||||
section 2025 Q3 <br> Bugfix Release Planning
|
||||
v5.3.4 : 2025/08/03
|
||||
v5.2.6 : 2025/09/04
|
||||
v5.5.1 : 2025/09/11
|
||||
v5.4.3 : 2025/10/08
|
||||
section 2025 Q4 <br> Bugfix Release Planning
|
||||
v5.5.2 : 2025/11/12
|
||||
v5.1.7 : 2026/01/06
|
||||
```
|
||||
@@ -10,7 +10,7 @@ The core components are organized into two groups.
|
||||
|
||||
The first group (referred to as `G0`) includes `hal`, `arch` (where `arch` is either `riscv` or `xtensa` depending on the chip), `esp_rom`, `esp_common`, and `soc`. This group contains information about and provides low-level access to the underlying hardware. In the case of `esp_common`, it contains hardware-agnostic code and utilities. These components may have dependencies on each other within the group, but outside dependencies should be minimized. The reason for this approach is that these components are fundamental, and many other components may require them. Ideally, the dependency relationship only goes one way, making it easier for this group to be usable in other projects.
|
||||
|
||||
The second group (referred to as `G1`) operates at a higher level than the first group. `G1` includes the components `esp_hw_support`, `esp_system`, `esp_libc`, `spi_flash`, `freertos`, `log`, and `heap`. Like the first group, circular dependencies within this group are allowed, and these components can have dependencies on the first group. G1 components represent essential software mechanisms for building other components.
|
||||
The second group (referred to as `G1`) operates at a higher level than the first group. `G1` includes the components `esp_hw_support`, `esp_system`, `newlib`, `spi_flash`, `freertos`, `log`, and `heap`. Like the first group, circular dependencies within this group are allowed, and these components can have dependencies on the first group. G1 components represent essential software mechanisms for building other components.
|
||||
|
||||
## Descriptions
|
||||
|
||||
@@ -40,7 +40,7 @@ Example:
|
||||
|
||||
#### `esp_common`
|
||||
|
||||
Contains hardware-agnostic definitions, constants, macros, utilities, 'pure' and/or algorithmic functions that is usable by all other components (that is, barring there being a more appropriate component to put them in).
|
||||
Contains hardware-agnostic definitions, constants, macros, utilities, 'pure' and/or algorithmic functions that is useable by all other components (that is, barring there being a more appropriate component to put them in).
|
||||
|
||||
Example:
|
||||
|
||||
@@ -85,7 +85,7 @@ Logging library.
|
||||
|
||||
Heap implementation.
|
||||
|
||||
#### `esp_libc`
|
||||
#### `newlib`
|
||||
|
||||
Some functions n the standard library are implemented here, especially those needing other `G1` components.
|
||||
|
||||
|
||||
@@ -4,38 +4,129 @@ if(${target} STREQUAL "linux")
|
||||
return() # This component is not supported by the POSIX/Linux simulator
|
||||
endif()
|
||||
|
||||
if(CONFIG_ESP_TRACE_TRANSPORT_APPTRACE)
|
||||
set(srcs
|
||||
"app_trace.c"
|
||||
"app_trace_util.c"
|
||||
"host_file_io.c"
|
||||
)
|
||||
set(srcs
|
||||
"app_trace.c"
|
||||
"app_trace_util.c"
|
||||
"host_file_io.c")
|
||||
|
||||
if(NOT CONFIG_APPTRACE_DEST_UART) # JTAG or None
|
||||
if(CONFIG_IDF_TARGET_ARCH_XTENSA)
|
||||
list(APPEND srcs "port/xtensa/port_jtag.c")
|
||||
elseif(CONFIG_IDF_TARGET_ARCH_RISCV)
|
||||
list(APPEND srcs "port/riscv/port_jtag.c")
|
||||
endif()
|
||||
list(APPEND srcs "app_trace_membufs_proto.c")
|
||||
if(CONFIG_APPTRACE_GCOV_ENABLE)
|
||||
if("${CMAKE_C_COMPILER_ID}" STREQUAL "GNU")
|
||||
list(APPEND srcs
|
||||
"gcov/gcov_rtio.c")
|
||||
else()
|
||||
fail_at_build_time(app_trace "Only GNU compiler can link with Gcov library")
|
||||
endif()
|
||||
|
||||
if(NOT CONFIG_APPTRACE_DEST_JTAG) # UART or None
|
||||
list(APPEND srcs "port/port_uart.c")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(CONFIG_ESP_DEBUG_STUBS_ENABLE)
|
||||
list(APPEND srcs "debug_stubs.c")
|
||||
endif()
|
||||
|
||||
set(include_dirs "include")
|
||||
|
||||
set(priv_include_dirs "private_include" "port/include")
|
||||
|
||||
if(CONFIG_APPTRACE_MEMBUFS_APPTRACE_PROTO_ENABLE)
|
||||
list(APPEND srcs
|
||||
"app_trace_membufs_proto.c")
|
||||
|
||||
if(CONFIG_IDF_TARGET_ARCH_XTENSA)
|
||||
list(APPEND srcs
|
||||
"port/xtensa/port.c")
|
||||
endif()
|
||||
if(CONFIG_IDF_TARGET_ARCH_RISCV)
|
||||
list(APPEND srcs
|
||||
"port/riscv/port.c")
|
||||
endif()
|
||||
endif()
|
||||
list(APPEND srcs
|
||||
"port/port_uart.c")
|
||||
|
||||
if(CONFIG_APPTRACE_SV_ENABLE)
|
||||
list(APPEND include_dirs
|
||||
sys_view/Config
|
||||
sys_view/SEGGER
|
||||
sys_view/Sample/FreeRTOSV10.4)
|
||||
|
||||
list(APPEND srcs
|
||||
"sys_view/SEGGER/SEGGER_SYSVIEW.c"
|
||||
"sys_view/Sample/FreeRTOSV10.4/Config/esp/SEGGER_SYSVIEW_Config_FreeRTOS.c"
|
||||
"sys_view/Sample/FreeRTOSV10.4/SEGGER_SYSVIEW_FreeRTOS.c"
|
||||
"sys_view/esp/SEGGER_RTT_esp.c"
|
||||
"sys_view/ext/heap_trace_module.c"
|
||||
"sys_view/ext/logging.c")
|
||||
endif()
|
||||
|
||||
if(CONFIG_HEAP_TRACING_TOHOST)
|
||||
list(APPEND srcs "heap_trace_tohost.c")
|
||||
set_source_files_properties(heap_trace_tohost.c
|
||||
PROPERTIES COMPILE_FLAGS
|
||||
-Wno-frame-address)
|
||||
endif()
|
||||
|
||||
idf_component_register(SRCS "${srcs}"
|
||||
INCLUDE_DIRS "${include_dirs}"
|
||||
PRIV_INCLUDE_DIRS "${priv_include_dirs}"
|
||||
PRIV_REQUIRES esp_driver_gptimer
|
||||
REQUIRES esp_timer esp_driver_uart
|
||||
# Requires "driver" for GPTimer in "SEGGER_SYSVIEW_Config_FreeRTOS.c"
|
||||
PRIV_REQUIRES soc driver
|
||||
REQUIRES esp_timer
|
||||
LDFRAGMENTS linker.lf)
|
||||
|
||||
# Force app_trace to also appear later than gcov in link line
|
||||
idf_component_get_property(app_trace app_trace COMPONENT_LIB)
|
||||
|
||||
if(CONFIG_APPTRACE_GCOV_ENABLE)
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
# Coverage info is not supported when clang is used
|
||||
# TODO: LLVM-214
|
||||
message(FATAL_ERROR "Coverage info is not supported when building with Clang!")
|
||||
endif()
|
||||
|
||||
# The original Gcov library from toolchain will be objcopy with symbols redefinitions (see file gcov/io_sym.map).
|
||||
# This is needed because ESP has no file system onboard; redefining these functions solves this problem and transmits
|
||||
# the output file to the host PC.
|
||||
|
||||
# Set a name for Gcov library
|
||||
set(GCOV_LIB libgcov_rtio)
|
||||
|
||||
# Set include directory of Gcov internal headers
|
||||
execute_process(COMMAND ${CMAKE_C_COMPILER} -print-file-name=plugin
|
||||
OUTPUT_VARIABLE gcc_plugin_dir
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_QUIET)
|
||||
set_source_files_properties(gcov/gcov_rtio.c
|
||||
PROPERTIES COMPILE_FLAGS "-I${gcc_plugin_dir}/include")
|
||||
|
||||
# Copy libgcov.a with symbols redefinition
|
||||
find_library(GCOV_LIBRARY_PATH gcov ${CMAKE_C_IMPLICIT_LINK_DIRECTORIES})
|
||||
add_custom_command(OUTPUT ${GCOV_LIB}.a
|
||||
COMMAND ${_CMAKE_TOOLCHAIN_PREFIX}objcopy
|
||||
--redefine-syms ${CMAKE_CURRENT_LIST_DIR}/gcov/io_sym.map
|
||||
${GCOV_LIBRARY_PATH} ${GCOV_LIB}.a
|
||||
MAIN_DEPENDENCY ${GCOV_LIBRARY_PATH}
|
||||
VERBATIM)
|
||||
add_custom_target(${GCOV_LIB}_target DEPENDS ${GCOV_LIB}.a)
|
||||
add_library(${GCOV_LIB} STATIC IMPORTED)
|
||||
set_target_properties(${GCOV_LIB}
|
||||
PROPERTIES
|
||||
IMPORTED_LOCATION ${CMAKE_CURRENT_BINARY_DIR}/${GCOV_LIB}.a)
|
||||
add_dependencies(${GCOV_LIB} ${GCOV_LIB}_target)
|
||||
add_dependencies(${COMPONENT_LIB} ${GCOV_LIB})
|
||||
|
||||
# disable --coverage for this component, as it is used as transport for gcov
|
||||
target_compile_options(${COMPONENT_LIB} PRIVATE "-fno-profile-arcs" "-fno-test-coverage")
|
||||
target_link_options(${COMPONENT_LIB} INTERFACE "-Wl,--wrap=__gcov_init")
|
||||
|
||||
target_link_libraries(${COMPONENT_LIB} INTERFACE ${GCOV_LIB} $<TARGET_FILE:${app_trace}> c)
|
||||
else()
|
||||
target_link_libraries(${COMPONENT_LIB} INTERFACE $<TARGET_FILE:${app_trace}> c)
|
||||
endif()
|
||||
|
||||
# This function adds a dependency on the given component if the component is included into the build.
|
||||
function(maybe_add_component component_name)
|
||||
idf_build_get_property(components BUILD_COMPONENTS)
|
||||
if(${component_name} IN_LIST components)
|
||||
idf_component_get_property(lib_name ${component_name} COMPONENT_LIB)
|
||||
target_link_libraries(${COMPONENT_LIB} PUBLIC ${lib_name})
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
if(CONFIG_APPTRACE_DEST_UART0 OR CONFIG_APPTRACE_DEST_UART1 OR CONFIG_APPTRACE_DEST_UART2)
|
||||
maybe_add_component(driver)
|
||||
endif()
|
||||
|
||||
399
components/app_trace/Kconfig
Normal file
399
components/app_trace/Kconfig
Normal file
@@ -0,0 +1,399 @@
|
||||
menu "Application Level Tracing"
|
||||
|
||||
choice APPTRACE_DESTINATION1
|
||||
prompt "Data Destination 1"
|
||||
default APPTRACE_DEST_NONE
|
||||
help
|
||||
Select destination for application trace: JTAG or none (to disable).
|
||||
|
||||
config APPTRACE_DEST_JTAG
|
||||
bool "JTAG"
|
||||
select APPTRACE_DEST_TRAX if IDF_TARGET_ARCH_XTENSA
|
||||
select APPTRACE_MEMBUFS_APPTRACE_PROTO_ENABLE
|
||||
select APPTRACE_ENABLE
|
||||
|
||||
config APPTRACE_DEST_NONE
|
||||
bool "None"
|
||||
endchoice
|
||||
|
||||
config APPTRACE_DEST_UART
|
||||
bool
|
||||
|
||||
config APPTRACE_DEST_UART_NOUSB
|
||||
bool
|
||||
|
||||
choice APPTRACE_DESTINATION2
|
||||
prompt "Data Destination 2"
|
||||
default APPTRACE_DEST_UART_NONE
|
||||
help
|
||||
Select destination for application trace: UART(XX) or none (to disable).
|
||||
|
||||
config APPTRACE_DEST_UART0
|
||||
bool "UART0"
|
||||
select APPTRACE_ENABLE
|
||||
select APPTRACE_DEST_UART
|
||||
select APPTRACE_DEST_UART_NOUSB
|
||||
depends on (ESP_CONSOLE_UART_NUM !=0)
|
||||
|
||||
config APPTRACE_DEST_UART1
|
||||
bool "UART1"
|
||||
select APPTRACE_ENABLE
|
||||
select APPTRACE_DEST_UART
|
||||
select APPTRACE_DEST_UART_NOUSB
|
||||
depends on (ESP_CONSOLE_UART_NUM !=1)
|
||||
|
||||
config APPTRACE_DEST_UART2
|
||||
bool "UART2"
|
||||
select APPTRACE_ENABLE
|
||||
select APPTRACE_DEST_UART
|
||||
select APPTRACE_DEST_UART_NOUSB
|
||||
depends on (ESP_CONSOLE_UART_NUM !=2) && (SOC_UART_NUM > 2)
|
||||
|
||||
config APPTRACE_DEST_USB_CDC
|
||||
bool "USB_CDC"
|
||||
select APPTRACE_ENABLE
|
||||
select APPTRACE_DEST_UART
|
||||
depends on !ESP_CONSOLE_USB_CDC && (IDF_TARGET_ESP32C3 || IDF_TARGET_ESP32S3) && !USB_ENABLED
|
||||
|
||||
config APPTRACE_DEST_UART_NONE
|
||||
bool "None"
|
||||
endchoice
|
||||
|
||||
config APPTRACE_UART_TX_GPIO
|
||||
int "UART TX on GPIO#"
|
||||
depends on APPTRACE_DEST_UART_NOUSB
|
||||
range 0 46
|
||||
default 12 if IDF_TARGET_ESP32
|
||||
default 12 if IDF_TARGET_ESP32C3
|
||||
default 12
|
||||
help
|
||||
This GPIO is used for UART TX pin.
|
||||
|
||||
config APPTRACE_UART_RX_GPIO
|
||||
int "UART RX on GPIO#"
|
||||
depends on APPTRACE_DEST_UART_NOUSB
|
||||
range 0 46
|
||||
default 13 if IDF_TARGET_ESP32
|
||||
default 13 if IDF_TARGET_ESP32C3
|
||||
default 13
|
||||
help
|
||||
This GPIO is used for UART RX pin.
|
||||
|
||||
config APPTRACE_UART_BAUDRATE
|
||||
int
|
||||
prompt "UART baud rate" if APPTRACE_DEST_UART
|
||||
depends on APPTRACE_DEST_UART
|
||||
default 1000000
|
||||
range 1200 8000000
|
||||
range 1200 1000000
|
||||
help
|
||||
This baud rate is used for UART.
|
||||
|
||||
The app's maximum baud rate depends on the UART clock source. If Power Management is disabled,
|
||||
the UART clock source is the APB clock and all baud rates in the available range will be sufficiently
|
||||
accurate. If Power Management is enabled, REF_TICK clock source is used so the baud rate is divided
|
||||
from 1MHz. Baud rates above 1Mbps are not possible and values between 500Kbps and 1Mbps may not be
|
||||
accurate.
|
||||
|
||||
config APPTRACE_UART_RX_BUFF_SIZE
|
||||
int
|
||||
prompt "UART RX ring buffer size" if APPTRACE_DEST_UART
|
||||
depends on APPTRACE_DEST_UART
|
||||
default 128
|
||||
range 64 32768
|
||||
help
|
||||
Size of the UART input ring buffer.
|
||||
This size is related to the baud rate, system tick frequency, and the amount of data to transfer.
|
||||
The data is placed into this buffer before being sent out to the interface.
|
||||
|
||||
config APPTRACE_UART_TX_BUFF_SIZE
|
||||
int
|
||||
prompt "UART TX ring buffer size" if APPTRACE_DEST_UART
|
||||
depends on APPTRACE_DEST_UART
|
||||
default 4096
|
||||
range 2048 32768
|
||||
help
|
||||
Size of the UART output ring buffer.
|
||||
This size is related to the baud rate, system tick frequency, and the amount of data to transfer.
|
||||
|
||||
config APPTRACE_UART_TX_MSG_SIZE
|
||||
int
|
||||
prompt "UART TX message size" if APPTRACE_DEST_UART
|
||||
depends on APPTRACE_DEST_UART
|
||||
default 128
|
||||
range 64 32768
|
||||
help
|
||||
Maximum size of the single message to transfer.
|
||||
|
||||
config APPTRACE_UART_TASK_PRIO
|
||||
int
|
||||
prompt "UART Task Priority" if APPTRACE_DEST_UART
|
||||
default 1
|
||||
range 1 32
|
||||
help
|
||||
UART task priority. In case of high events rate,
|
||||
this parameter could be changed up to (configMAX_PRIORITIES-1).
|
||||
|
||||
config APPTRACE_DEST_TRAX
|
||||
bool
|
||||
depends on IDF_TARGET_ARCH_XTENSA && !ESP32_TRAX && !ESP32S2_TRAX && !ESP32S3_TRAX
|
||||
select ESP32_MEMMAP_TRACEMEM
|
||||
select ESP32S2_MEMMAP_TRACEMEM
|
||||
select ESP32S3_MEMMAP_TRACEMEM
|
||||
select ESP32_MEMMAP_TRACEMEM_TWOBANKS
|
||||
select ESP32S2_MEMMAP_TRACEMEM_TWOBANKS
|
||||
select ESP32S3_MEMMAP_TRACEMEM_TWOBANKS
|
||||
default n
|
||||
help
|
||||
Enables/disable TRAX tracing HW.
|
||||
|
||||
config APPTRACE_MEMBUFS_APPTRACE_PROTO_ENABLE
|
||||
bool
|
||||
default n
|
||||
help
|
||||
Enables/disable swapping memory buffers tracing protocol.
|
||||
|
||||
config APPTRACE_ENABLE
|
||||
bool
|
||||
default n
|
||||
help
|
||||
Enables/disable application tracing module.
|
||||
|
||||
config APPTRACE_LOCK_ENABLE
|
||||
bool
|
||||
default !APPTRACE_SV_ENABLE
|
||||
help
|
||||
Enables/disable application tracing module internal sync lock.
|
||||
|
||||
config APPTRACE_ONPANIC_HOST_FLUSH_TMO
|
||||
int "Timeout for flushing last trace data to host on panic"
|
||||
depends on APPTRACE_ENABLE
|
||||
range -1 5000
|
||||
default -1
|
||||
help
|
||||
Timeout for flushing last trace data to host in case of panic. In ms.
|
||||
Use -1 to disable timeout and wait forever.
|
||||
|
||||
config APPTRACE_POSTMORTEM_FLUSH_THRESH
|
||||
int "Threshold for flushing last trace data to host on panic"
|
||||
depends on APPTRACE_ENABLE
|
||||
range 0 16384
|
||||
default 0
|
||||
help
|
||||
Threshold for flushing last trace data to host on panic in post-mortem mode.
|
||||
This is minimal amount of data needed to perform flush. In bytes.
|
||||
|
||||
config APPTRACE_BUF_SIZE
|
||||
int "Size of the apptrace buffer"
|
||||
depends on APPTRACE_MEMBUFS_APPTRACE_PROTO_ENABLE && !APPTRACE_DEST_TRAX
|
||||
default 16384
|
||||
help
|
||||
Size of the memory buffer for trace data in bytes.
|
||||
|
||||
config APPTRACE_PENDING_DATA_SIZE_MAX
|
||||
int "Size of the pending data buffer"
|
||||
depends on APPTRACE_MEMBUFS_APPTRACE_PROTO_ENABLE
|
||||
default 0
|
||||
help
|
||||
Size of the buffer for events in bytes. It is useful for buffering events from
|
||||
the time critical code (scheduler, ISRs etc). If this parameter is 0 then
|
||||
events will be discarded when main HW buffer is full.
|
||||
|
||||
menu "FreeRTOS SystemView Tracing"
|
||||
depends on APPTRACE_ENABLE
|
||||
config APPTRACE_SV_ENABLE
|
||||
bool "SystemView Tracing Enable"
|
||||
depends on APPTRACE_ENABLE
|
||||
default n
|
||||
help
|
||||
Enables support for SEGGER SystemView tracing functionality.
|
||||
|
||||
choice APPTRACE_SV_DEST
|
||||
prompt "SystemView destination"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default APPTRACE_SV_DEST_JTAG
|
||||
help
|
||||
SystemView will transfer data through the selected interface.
|
||||
|
||||
config APPTRACE_SV_DEST_JTAG
|
||||
bool "Data destination JTAG"
|
||||
depends on !PM_ENABLE && !APPTRACE_DEST_NONE
|
||||
help
|
||||
Send SEGGER SystemView events through JTAG interface.
|
||||
|
||||
config APPTRACE_SV_DEST_UART
|
||||
bool "Data destination UART"
|
||||
depends on APPTRACE_DEST_UART
|
||||
help
|
||||
Send SEGGER SystemView events through UART interface.
|
||||
|
||||
endchoice
|
||||
|
||||
choice APPTRACE_SV_CPU
|
||||
prompt "CPU to trace"
|
||||
depends on APPTRACE_SV_DEST_UART && !FREERTOS_UNICORE
|
||||
default APPTRACE_SV_DEST_CPU_0
|
||||
help
|
||||
Define the CPU to trace by SystemView.
|
||||
|
||||
config APPTRACE_SV_DEST_CPU_0
|
||||
bool "CPU0"
|
||||
help
|
||||
Send SEGGER SystemView events for Pro CPU.
|
||||
|
||||
config APPTRACE_SV_DEST_CPU_1
|
||||
bool "CPU1"
|
||||
help
|
||||
Send SEGGER SystemView events for App CPU.
|
||||
|
||||
endchoice
|
||||
|
||||
|
||||
choice APPTRACE_SV_TS_SOURCE
|
||||
prompt "Timer to use as timestamp source"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default APPTRACE_SV_TS_SOURCE_CCOUNT if FREERTOS_UNICORE && !PM_ENABLE && !IDF_TARGET_ESP32C3
|
||||
default APPTRACE_SV_TS_SOURCE_GPTIMER if !FREERTOS_UNICORE && !PM_ENABLE && !IDF_TARGET_ESP32C3
|
||||
default APPTRACE_SV_TS_SOURCE_ESP_TIMER if PM_ENABLE || IDF_TARGET_ESP32C3
|
||||
help
|
||||
SystemView needs to use a hardware timer as the source of timestamps
|
||||
when tracing. This option selects the timer for it.
|
||||
|
||||
config APPTRACE_SV_TS_SOURCE_CCOUNT
|
||||
bool "CPU cycle counter (CCOUNT)"
|
||||
depends on FREERTOS_UNICORE && !PM_ENABLE && !IDF_TARGET_ESP32C3
|
||||
|
||||
config APPTRACE_SV_TS_SOURCE_GPTIMER
|
||||
bool "General Purpose Timer (Timer Group)"
|
||||
depends on !PM_ENABLE && !IDF_TARGET_ESP32C3
|
||||
|
||||
config APPTRACE_SV_TS_SOURCE_ESP_TIMER
|
||||
bool "esp_timer high resolution timer"
|
||||
|
||||
endchoice
|
||||
|
||||
config APPTRACE_SV_MAX_TASKS
|
||||
int "Maximum supported tasks"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
range 1 64
|
||||
default 16
|
||||
help
|
||||
Configures maximum supported tasks in sysview debug
|
||||
|
||||
config APPTRACE_SV_BUF_WAIT_TMO
|
||||
int "Trace buffer wait timeout"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default 500
|
||||
help
|
||||
Configures timeout (in us) to wait for free space in trace buffer.
|
||||
Set to -1 to wait forever and avoid lost events.
|
||||
|
||||
config APPTRACE_SV_EVT_OVERFLOW_ENABLE
|
||||
bool "Trace Buffer Overflow Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Trace Buffer Overflow" event.
|
||||
|
||||
config APPTRACE_SV_EVT_ISR_ENTER_ENABLE
|
||||
bool "ISR Enter Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "ISR Enter" event.
|
||||
|
||||
config APPTRACE_SV_EVT_ISR_EXIT_ENABLE
|
||||
bool "ISR Exit Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "ISR Exit" event.
|
||||
|
||||
config APPTRACE_SV_EVT_ISR_TO_SCHED_ENABLE
|
||||
bool "ISR Exit to Scheduler Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "ISR to Scheduler" event.
|
||||
|
||||
config APPTRACE_SV_EVT_TASK_START_EXEC_ENABLE
|
||||
bool "Task Start Execution Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Task Start Execution" event.
|
||||
|
||||
config APPTRACE_SV_EVT_TASK_STOP_EXEC_ENABLE
|
||||
bool "Task Stop Execution Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Task Stop Execution" event.
|
||||
|
||||
config APPTRACE_SV_EVT_TASK_START_READY_ENABLE
|
||||
bool "Task Start Ready State Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Task Start Ready State" event.
|
||||
|
||||
config APPTRACE_SV_EVT_TASK_STOP_READY_ENABLE
|
||||
bool "Task Stop Ready State Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Task Stop Ready State" event.
|
||||
|
||||
config APPTRACE_SV_EVT_TASK_CREATE_ENABLE
|
||||
bool "Task Create Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Task Create" event.
|
||||
|
||||
config APPTRACE_SV_EVT_TASK_TERMINATE_ENABLE
|
||||
bool "Task Terminate Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Task Terminate" event.
|
||||
|
||||
config APPTRACE_SV_EVT_IDLE_ENABLE
|
||||
bool "System Idle Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "System Idle" event.
|
||||
|
||||
config APPTRACE_SV_EVT_TIMER_ENTER_ENABLE
|
||||
bool "Timer Enter Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Timer Enter" event.
|
||||
|
||||
config APPTRACE_SV_EVT_TIMER_EXIT_ENABLE
|
||||
bool "Timer Exit Event"
|
||||
depends on APPTRACE_SV_ENABLE
|
||||
default y
|
||||
help
|
||||
Enables "Timer Exit" event.
|
||||
|
||||
endmenu
|
||||
|
||||
config APPTRACE_GCOV_ENABLE
|
||||
bool "GCOV to Host Enable"
|
||||
depends on APPTRACE_ENABLE && !APPTRACE_SV_ENABLE
|
||||
select ESP_DEBUG_STUBS_ENABLE
|
||||
default n
|
||||
help
|
||||
Enables support for GCOV data transfer to host.
|
||||
|
||||
config APPTRACE_GCOV_DUMP_TASK_STACK_SIZE
|
||||
int "Gcov dump task stack size"
|
||||
depends on APPTRACE_GCOV_ENABLE
|
||||
default 2048
|
||||
help
|
||||
Configures stack size of Gcov dump task
|
||||
|
||||
endmenu
|
||||
@@ -1,134 +0,0 @@
|
||||
menu "Application Level Tracing"
|
||||
depends on ESP_TRACE_TRANSPORT_APPTRACE
|
||||
choice APPTRACE_DESTINATION
|
||||
prompt "Data Destination"
|
||||
default APPTRACE_DEST_JTAG if !PM_ENABLE
|
||||
default APPTRACE_DEST_UART if PM_ENABLE
|
||||
help
|
||||
Select destination for application trace: JTAG, UART, or both.
|
||||
|
||||
config APPTRACE_DEST_JTAG
|
||||
bool "JTAG"
|
||||
select APPTRACE_TRAX_ENABLE if IDF_TARGET_ARCH_XTENSA
|
||||
depends on !PM_ENABLE
|
||||
|
||||
config APPTRACE_DEST_UART
|
||||
bool "UART"
|
||||
|
||||
config APPTRACE_DEST_ALL
|
||||
bool "All (runtime selection)"
|
||||
help
|
||||
Compile both JTAG and UART interfaces in advance (higher IRAM usage).
|
||||
Allows runtime switching between JTAG and UART via esp_apptrace_get_user_params().
|
||||
|
||||
If esp_apptrace_get_user_params() is not provided by the
|
||||
application, JTAG is used by default with the default
|
||||
configuration defined in components/app_trace/include/esp_app_trace_config.h.
|
||||
endchoice
|
||||
|
||||
config APPTRACE_BUF_SIZE
|
||||
int "Size of the apptrace buffer"
|
||||
depends on APPTRACE_DEST_JTAG && !APPTRACE_TRAX_ENABLE
|
||||
default 16384
|
||||
help
|
||||
Size of the memory buffer for trace data in bytes.
|
||||
|
||||
config APPTRACE_DEST_UART_NUM
|
||||
int "UART port number"
|
||||
depends on APPTRACE_DEST_UART
|
||||
range 0 1 if (SOC_UART_HP_NUM <= 2)
|
||||
range 0 2 if (SOC_UART_HP_NUM <= 3)
|
||||
range 0 4 if (SOC_UART_HP_NUM <= 5)
|
||||
default 1
|
||||
help
|
||||
UART communication port number for the apptrace destination.
|
||||
See UART documentation for available port numbers.
|
||||
|
||||
config APPTRACE_UART_TX_GPIO
|
||||
int "UART TX on GPIO<num>"
|
||||
depends on APPTRACE_DEST_UART
|
||||
range 0 46
|
||||
default 12
|
||||
help
|
||||
This GPIO is used for UART TX pin.
|
||||
|
||||
config APPTRACE_UART_RX_GPIO
|
||||
int "UART RX on GPIO<num>"
|
||||
depends on APPTRACE_DEST_UART
|
||||
range 0 46
|
||||
default 13
|
||||
help
|
||||
This GPIO is used for UART RX pin.
|
||||
|
||||
config APPTRACE_UART_BAUDRATE
|
||||
int
|
||||
prompt "UART baud rate" if APPTRACE_DEST_UART
|
||||
depends on APPTRACE_DEST_UART
|
||||
default 1000000
|
||||
range 1200 8000000
|
||||
range 1200 1000000
|
||||
help
|
||||
This baud rate is used for UART.
|
||||
|
||||
The app's maximum baud rate depends on the UART clock source. If Power Management is disabled,
|
||||
the UART clock source is the APB clock and all baud rates in the available range will be sufficiently
|
||||
accurate. If Power Management is enabled, REF_TICK clock source is used so the baud rate is divided
|
||||
from 1MHz. Baud rates above 1Mbps are not possible and values between 500Kbps and 1Mbps may not be
|
||||
accurate.
|
||||
|
||||
config APPTRACE_UART_TX_BUFF_SIZE
|
||||
int
|
||||
prompt "UART TX ring buffer size" if APPTRACE_DEST_UART
|
||||
depends on APPTRACE_DEST_UART
|
||||
default 4096
|
||||
range 2048 32768
|
||||
help
|
||||
Size of the UART output ring buffer. Must be power of two.
|
||||
This size is related to the baud rate, system tick frequency, and the amount of data to transfer.
|
||||
|
||||
config APPTRACE_UART_TX_MSG_SIZE
|
||||
int
|
||||
prompt "UART TX message size" if APPTRACE_DEST_UART
|
||||
depends on APPTRACE_DEST_UART
|
||||
default 128
|
||||
range 64 32768
|
||||
help
|
||||
Maximum size of the single message to transfer.
|
||||
|
||||
config APPTRACE_TRAX_ENABLE
|
||||
bool
|
||||
depends on IDF_TARGET_ARCH_XTENSA && !ESP32_TRAX && !ESP32S2_TRAX && !ESP32S3_TRAX
|
||||
select ESP32_MEMMAP_TRACEMEM
|
||||
select ESP32S2_MEMMAP_TRACEMEM
|
||||
select ESP32S3_MEMMAP_TRACEMEM
|
||||
select ESP32_MEMMAP_TRACEMEM_TWOBANKS
|
||||
select ESP32S2_MEMMAP_TRACEMEM_TWOBANKS
|
||||
select ESP32S3_MEMMAP_TRACEMEM_TWOBANKS
|
||||
default n
|
||||
help
|
||||
Enables/disable TRAX tracing HW.
|
||||
|
||||
config APPTRACE_LOCK_ENABLE
|
||||
bool "Internal Sync Lock Enable"
|
||||
default n
|
||||
help
|
||||
Enables/disable application tracing module internal sync lock to prevent data corruption
|
||||
when multiple tasks are writing to the same trace buffer.
|
||||
Keep in mind this will slow down the trace data transfer to the host.
|
||||
|
||||
config APPTRACE_ONPANIC_HOST_FLUSH_TMO
|
||||
int "Timeout for flushing last trace data to host on panic"
|
||||
range -1 5000
|
||||
default -1
|
||||
help
|
||||
Timeout for flushing last trace data to host in case of panic. In ms.
|
||||
Use -1 to disable timeout and wait forever.
|
||||
|
||||
config APPTRACE_POSTMORTEM_FLUSH_THRESH
|
||||
int "Threshold for flushing last trace data to host on panic"
|
||||
range 0 16384
|
||||
default 0
|
||||
help
|
||||
Threshold for flushing last trace data to host on panic in post-mortem mode.
|
||||
This is minimal amount of data needed to perform flush. In bytes.
|
||||
endmenu
|
||||
@@ -1,230 +1,296 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2025 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
#include <string.h>
|
||||
#include "esp_cpu.h"
|
||||
#include "esp_log.h"
|
||||
#include "esp_rom_sys.h"
|
||||
#include "esp_app_trace.h"
|
||||
#include "esp_app_trace_port.h"
|
||||
#include "esp_app_trace_types.h"
|
||||
#include "esp_private/startup_internal.h"
|
||||
|
||||
#if CONFIG_ESP_CONSOLE_UART && CONFIG_APPTRACE_DEST_UART && (CONFIG_APPTRACE_DEST_UART_NUM == CONFIG_ESP_CONSOLE_UART_NUM)
|
||||
#error "Application trace UART and console UART cannot use the same port number"
|
||||
#ifdef CONFIG_APPTRACE_DEST_UART0
|
||||
#define ESP_APPTRACE_DEST_UART_NUM 0
|
||||
#elif CONFIG_APPTRACE_DEST_UART1
|
||||
#define ESP_APPTRACE_DEST_UART_NUM 1
|
||||
#elif CONFIG_APPTRACE_DEST_UART2
|
||||
#define ESP_APPTRACE_DEST_UART_NUM 2
|
||||
#elif CONFIG_APPTRACE_DEST_USB_CDC
|
||||
#define ESP_APPTRACE_DEST_UART_NUM 10
|
||||
#else
|
||||
#define ESP_APPTRACE_DEST_UART_NUM 0
|
||||
#endif
|
||||
|
||||
#define ESP_APPTRACE_MAX_VPRINTF_ARGS 256
|
||||
#define ESP_APPTRACE_HOST_BUF_SIZE 256
|
||||
|
||||
#define ESP_APPTRACE_PRINT_LOCK 0
|
||||
|
||||
const static char *TAG = "esp_apptrace";
|
||||
|
||||
/** tracing module internal data */
|
||||
typedef struct {
|
||||
esp_apptrace_hw_t *hw;
|
||||
void *hw_data;
|
||||
esp_apptrace_dest_t dest;
|
||||
} esp_apptrace_channel_t;
|
||||
|
||||
static esp_apptrace_channel_t s_trace_ch;
|
||||
static volatile int s_trace_ch_hw_initialized = 0;
|
||||
static esp_apptrace_channel_t s_trace_channels[ESP_APPTRACE_DEST_MAX];
|
||||
static bool s_inited;
|
||||
|
||||
esp_err_t esp_apptrace_init(const esp_apptrace_config_t *config)
|
||||
esp_err_t esp_apptrace_init(void)
|
||||
{
|
||||
__attribute__((unused)) void *hw_data = NULL;
|
||||
int res;
|
||||
esp_apptrace_hw_t *hw = NULL;
|
||||
void *hw_data = NULL;
|
||||
|
||||
// 'esp_apptrace_init()' is called on every core, so ensure to do main initialization only once
|
||||
if (esp_cpu_get_core_id() == 0) {
|
||||
#if CONFIG_APPTRACE_DEST_JTAG
|
||||
s_trace_ch.hw = esp_apptrace_jtag_hw_get(&hw_data);
|
||||
s_trace_ch.hw_data = hw_data;
|
||||
#elif CONFIG_APPTRACE_DEST_UART
|
||||
const esp_apptrace_uart_config_t *uart_config = &config->dest_cfg.uart;
|
||||
s_trace_ch.hw = esp_apptrace_uart_hw_get(uart_config->uart_num, &hw_data);
|
||||
s_trace_ch.hw_data = hw_data;
|
||||
#else // CONFIG_APPTRACE_DEST_ALL allows runtime selection between destinations
|
||||
if (config->dest == ESP_APPTRACE_DEST_JTAG) {
|
||||
s_trace_ch.hw = esp_apptrace_jtag_hw_get(&hw_data);
|
||||
s_trace_ch.hw_data = hw_data;
|
||||
} else if (config->dest == ESP_APPTRACE_DEST_UART) {
|
||||
const esp_apptrace_uart_config_t *uart_config = &config->dest_cfg.uart;
|
||||
s_trace_ch.hw = esp_apptrace_uart_hw_get(uart_config->uart_num, &hw_data);
|
||||
s_trace_ch.hw_data = hw_data;
|
||||
} else {
|
||||
s_trace_ch.hw = NULL;
|
||||
s_trace_ch.hw_data = NULL;
|
||||
ESP_APPTRACE_LOGE("Invalid destination type (%d)!", config->dest);
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
memset(&s_trace_channels, 0, sizeof(s_trace_channels));
|
||||
hw = esp_apptrace_jtag_hw_get(&hw_data);
|
||||
ESP_APPTRACE_LOGD("HW interface %p", hw);
|
||||
if (hw != NULL) {
|
||||
s_trace_channels[ESP_APPTRACE_DEST_JTAG].hw = hw;
|
||||
s_trace_channels[ESP_APPTRACE_DEST_JTAG].hw_data = hw_data;
|
||||
}
|
||||
hw = esp_apptrace_uart_hw_get(ESP_APPTRACE_DEST_UART_NUM, &hw_data);
|
||||
if (hw != NULL) {
|
||||
s_trace_channels[ESP_APPTRACE_DEST_UART].hw = hw;
|
||||
s_trace_channels[ESP_APPTRACE_DEST_UART].hw_data = hw_data;
|
||||
}
|
||||
s_inited = true;
|
||||
}
|
||||
|
||||
// esp_apptrace_init() is called on every core, so initialize trace channel on every core
|
||||
for (int i = 0; i < sizeof(s_trace_channels) / sizeof(s_trace_channels[0]); i++) {
|
||||
esp_apptrace_channel_t *ch = &s_trace_channels[i];
|
||||
if (ch->hw) {
|
||||
res = ch->hw->init(ch->hw_data);
|
||||
if (res != ESP_OK) {
|
||||
ESP_APPTRACE_LOGE("Failed to init trace channel HW interface (%d)!", res);
|
||||
return res;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
ESP_SYSTEM_INIT_FN(esp_apptrace_init, ESP_SYSTEM_INIT_ALL_CORES, 115)
|
||||
{
|
||||
return esp_apptrace_init();
|
||||
}
|
||||
|
||||
void esp_apptrace_down_buffer_config(uint8_t *buf, uint32_t size)
|
||||
{
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
if (!s_inited) {
|
||||
return;
|
||||
}
|
||||
// currently down buffer is supported for JTAG interface only
|
||||
// TODO: one more argument should be added to this function to specify the HW interface: JTAG, UART0 etc
|
||||
ch = &s_trace_channels[ESP_APPTRACE_DEST_JTAG];
|
||||
if (ch->hw != NULL) {
|
||||
if (ch->hw->down_buffer_config != NULL) {
|
||||
ch->hw->down_buffer_config(ch->hw_data, buf, size);
|
||||
}
|
||||
#endif
|
||||
s_trace_ch.dest = config->dest;
|
||||
s_trace_ch_hw_initialized = 1;
|
||||
} else {
|
||||
// There is NO guarantee that system init functions will execute on core 0 first
|
||||
// So we need to wait for core 0 to set up the hardware interface
|
||||
while (!s_trace_ch_hw_initialized) {
|
||||
esp_rom_delay_us(10);
|
||||
}
|
||||
ESP_APPTRACE_LOGD("Trace destination for JTAG not supported!");
|
||||
}
|
||||
|
||||
if (s_trace_ch.hw) {
|
||||
int res = s_trace_ch.hw->init(s_trace_ch.hw_data, config);
|
||||
if (res != ESP_OK) {
|
||||
ESP_APPTRACE_LOGE("Failed to init trace channel HW interface (%d)!", res);
|
||||
return res;
|
||||
ch = &s_trace_channels[ESP_APPTRACE_DEST_UART];
|
||||
if (ch->hw != NULL) {
|
||||
if (ch->hw->down_buffer_config != NULL) {
|
||||
ch->hw->down_buffer_config(ch->hw_data, buf, size);
|
||||
}
|
||||
} else {
|
||||
ESP_APPTRACE_LOGD("Trace destination for UART not supported!");
|
||||
}
|
||||
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_down_buffer_config(uint8_t *buf, uint32_t size)
|
||||
uint8_t *esp_apptrace_down_buffer_get(esp_apptrace_dest_t dest, uint32_t *size, uint32_t user_tmo)
|
||||
{
|
||||
if (!buf || size == 0) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_trace_ch.hw) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
if (s_trace_ch.hw->down_buffer_config) {
|
||||
s_trace_ch.hw->down_buffer_config(s_trace_ch.hw_data, buf, size);
|
||||
}
|
||||
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
uint8_t *esp_apptrace_down_buffer_get(uint32_t *size, uint32_t user_tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
|
||||
if (!size || *size == 0) {
|
||||
return NULL;
|
||||
}
|
||||
if (!s_trace_ch.hw) {
|
||||
return NULL;
|
||||
}
|
||||
if (!s_trace_ch.hw->get_down_buffer) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, user_tmo);
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
return s_trace_ch.hw->get_down_buffer(s_trace_ch.hw_data, size, &tmo);
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return NULL;
|
||||
}
|
||||
if (size == NULL || *size == 0) {
|
||||
return NULL;
|
||||
}
|
||||
if (!s_inited) {
|
||||
return NULL;
|
||||
}
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return NULL;
|
||||
}
|
||||
if (ch->hw->get_down_buffer == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_init(&tmo, user_tmo);
|
||||
return ch->hw->get_down_buffer(ch->hw_data, size, &tmo);
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_down_buffer_put(uint8_t *ptr, uint32_t user_tmo)
|
||||
esp_err_t esp_apptrace_down_buffer_put(esp_apptrace_dest_t dest, uint8_t *ptr, uint32_t user_tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
if (!ptr) {
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_trace_ch.hw) {
|
||||
if (ptr == NULL) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_inited) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
if (!s_trace_ch.hw->get_down_buffer) {
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
if (ch->hw->get_down_buffer == NULL) {
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, user_tmo);
|
||||
|
||||
return s_trace_ch.hw->put_down_buffer(s_trace_ch.hw_data, ptr, &tmo);
|
||||
return ch->hw->put_down_buffer(ch->hw_data, ptr, &tmo);
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_read(void *buf, uint32_t *size, uint32_t user_tmo)
|
||||
esp_err_t esp_apptrace_read(esp_apptrace_dest_t dest, void *buf, uint32_t *size, uint32_t user_tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
int res = ESP_OK;
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
if (!buf || !size || *size == 0) {
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_trace_ch.hw) {
|
||||
if (buf == NULL || size == NULL || *size == 0) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_inited) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
if (!s_trace_ch.hw->get_down_buffer || !s_trace_ch.hw->put_down_buffer) {
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
if (ch->hw->get_down_buffer == NULL || ch->hw->put_down_buffer == NULL) {
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
//TODO: callback system
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, user_tmo);
|
||||
|
||||
uint32_t act_sz = *size;
|
||||
*size = 0;
|
||||
uint8_t *ptr = s_trace_ch.hw->get_down_buffer(s_trace_ch.hw_data, &act_sz, &tmo);
|
||||
uint8_t *ptr = ch->hw->get_down_buffer(ch->hw_data, &act_sz, &tmo);
|
||||
if (ptr && act_sz > 0) {
|
||||
ESP_APPTRACE_LOGD("Read %" PRIu32 " bytes from host", act_sz);
|
||||
ESP_APPTRACE_LOGD("Read %d bytes from host", act_sz);
|
||||
memcpy(buf, ptr, act_sz);
|
||||
res = ch->hw->put_down_buffer(ch->hw_data, ptr, &tmo);
|
||||
*size = act_sz;
|
||||
return s_trace_ch.hw->put_down_buffer(s_trace_ch.hw_data, ptr, &tmo);
|
||||
} else {
|
||||
res = ESP_ERR_TIMEOUT;
|
||||
}
|
||||
|
||||
return ESP_ERR_TIMEOUT;
|
||||
return res;
|
||||
}
|
||||
|
||||
uint8_t *esp_apptrace_buffer_get(uint32_t size, uint32_t user_tmo)
|
||||
uint8_t *esp_apptrace_buffer_get(esp_apptrace_dest_t dest, uint32_t size, uint32_t user_tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return NULL;
|
||||
}
|
||||
if (size == 0) {
|
||||
return NULL;
|
||||
}
|
||||
if (!s_trace_ch.hw) {
|
||||
if (!s_inited) {
|
||||
return NULL;
|
||||
}
|
||||
if (!s_trace_ch.hw->get_up_buffer) {
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return NULL;
|
||||
}
|
||||
if (ch->hw->get_up_buffer == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, user_tmo);
|
||||
|
||||
return s_trace_ch.hw->get_up_buffer(s_trace_ch.hw_data, size, &tmo);
|
||||
return ch->hw->get_up_buffer(ch->hw_data, size, &tmo);
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_buffer_put(uint8_t *ptr, uint32_t user_tmo)
|
||||
esp_err_t esp_apptrace_buffer_put(esp_apptrace_dest_t dest, uint8_t *ptr, uint32_t user_tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
if (!ptr) {
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_trace_ch.hw) {
|
||||
if (ptr == NULL) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_inited) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
if (!s_trace_ch.hw->put_up_buffer) {
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
if (ch->hw->put_up_buffer == NULL) {
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, user_tmo);
|
||||
|
||||
return s_trace_ch.hw->put_up_buffer(s_trace_ch.hw_data, ptr, &tmo);
|
||||
return ch->hw->put_up_buffer(ch->hw_data, ptr, &tmo);
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_write(const void *data, uint32_t size, uint32_t user_tmo)
|
||||
esp_err_t esp_apptrace_write(esp_apptrace_dest_t dest, const void *data, uint32_t size, uint32_t user_tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
uint8_t *ptr = NULL;
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
if (!data || size == 0) {
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_trace_ch.hw) {
|
||||
if (data == NULL || size == 0) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_inited) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
if (!s_trace_ch.hw->get_up_buffer || !s_trace_ch.hw->put_up_buffer) {
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
if (ch->hw->get_up_buffer == NULL || ch->hw->put_up_buffer == NULL) {
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, user_tmo);
|
||||
|
||||
uint8_t *ptr = s_trace_ch.hw->get_up_buffer(s_trace_ch.hw_data, size, &tmo);
|
||||
if (!ptr) {
|
||||
ptr = ch->hw->get_up_buffer(ch->hw_data, size, &tmo);
|
||||
if (ptr == NULL) {
|
||||
return ESP_ERR_NO_MEM;
|
||||
}
|
||||
|
||||
@@ -233,30 +299,37 @@ esp_err_t esp_apptrace_write(const void *data, uint32_t size, uint32_t user_tmo)
|
||||
memcpy(ptr, data, size);
|
||||
|
||||
// now indicate that this buffer is ready to be sent off to host
|
||||
return s_trace_ch.hw->put_up_buffer(s_trace_ch.hw_data, ptr, &tmo);
|
||||
return ch->hw->put_up_buffer(ch->hw_data, ptr, &tmo);
|
||||
}
|
||||
|
||||
int esp_apptrace_vprintf_to(uint32_t user_tmo, const char *fmt, va_list ap)
|
||||
int esp_apptrace_vprintf_to(esp_apptrace_dest_t dest, uint32_t user_tmo, const char *fmt, va_list ap)
|
||||
{
|
||||
uint16_t nargs = 0;
|
||||
uint8_t *pout, *p = (uint8_t *)fmt;
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
|
||||
if (!fmt) {
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return -1;
|
||||
}
|
||||
if (!s_trace_ch.hw) {
|
||||
if (fmt == NULL) {
|
||||
return -1;
|
||||
}
|
||||
if (!s_trace_ch.hw->get_up_buffer || !s_trace_ch.hw->put_up_buffer) {
|
||||
if (!s_inited) {
|
||||
return -1;
|
||||
}
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return -1;
|
||||
}
|
||||
if (ch->hw->get_up_buffer == NULL || ch->hw->put_up_buffer == NULL) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, user_tmo);
|
||||
|
||||
ESP_APPTRACE_LOGD("fmt %p", fmt);
|
||||
ESP_APPTRACE_LOGD("fmt %x", fmt);
|
||||
while ((p = (uint8_t *)strchr((char *)p, '%')) && nargs < ESP_APPTRACE_MAX_VPRINTF_ARGS) {
|
||||
p++;
|
||||
if (*p != '%' && *p != 0) {
|
||||
@@ -268,8 +341,8 @@ int esp_apptrace_vprintf_to(uint32_t user_tmo, const char *fmt, va_list ap)
|
||||
ESP_APPTRACE_LOGE("Failed to store all printf args!");
|
||||
}
|
||||
|
||||
pout = s_trace_ch.hw->get_up_buffer(s_trace_ch.hw_data, 1 + sizeof(char *) + nargs * sizeof(uint32_t), &tmo);
|
||||
if (!pout) {
|
||||
pout = ch->hw->get_up_buffer(ch->hw_data, 1 + sizeof(char *) + nargs * sizeof(uint32_t), &tmo);
|
||||
if (pout == NULL) {
|
||||
ESP_APPTRACE_LOGE("Failed to get buffer!");
|
||||
return -1;
|
||||
}
|
||||
@@ -282,10 +355,10 @@ int esp_apptrace_vprintf_to(uint32_t user_tmo, const char *fmt, va_list ap)
|
||||
uint32_t arg = va_arg(ap, uint32_t);
|
||||
*(uint32_t *)pout = arg;
|
||||
pout += sizeof(uint32_t);
|
||||
ESP_APPTRACE_LOGD("arg %" PRIx32, arg);
|
||||
ESP_APPTRACE_LOGD("arg %x", arg);
|
||||
}
|
||||
|
||||
int ret = s_trace_ch.hw->put_up_buffer(s_trace_ch.hw_data, p, &tmo);
|
||||
int ret = ch->hw->put_up_buffer(ch->hw_data, p, &tmo);
|
||||
if (ret != ESP_OK) {
|
||||
ESP_APPTRACE_LOGE("Failed to put printf buf (%d)!", ret);
|
||||
return -1;
|
||||
@@ -296,87 +369,85 @@ int esp_apptrace_vprintf_to(uint32_t user_tmo, const char *fmt, va_list ap)
|
||||
|
||||
int esp_apptrace_vprintf(const char *fmt, va_list ap)
|
||||
{
|
||||
return esp_apptrace_vprintf_to(0, fmt, ap);
|
||||
return esp_apptrace_vprintf_to(ESP_APPTRACE_DEST_JTAG, 0, fmt, ap);
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_flush_nolock(uint32_t min_sz, uint32_t usr_tmo)
|
||||
esp_err_t esp_apptrace_flush_nolock(esp_apptrace_dest_t dest, uint32_t min_sz, uint32_t usr_tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
if (!s_trace_ch.hw) {
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_inited) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
if (!s_trace_ch.hw->flush_up_buffer_nolock) {
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
if (ch->hw->flush_up_buffer_nolock == NULL) {
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, usr_tmo);
|
||||
|
||||
return s_trace_ch.hw->flush_up_buffer_nolock(s_trace_ch.hw_data, min_sz, &tmo);
|
||||
return ch->hw->flush_up_buffer_nolock(ch->hw_data, min_sz, &tmo);
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_flush(uint32_t usr_tmo)
|
||||
esp_err_t esp_apptrace_flush(esp_apptrace_dest_t dest, uint32_t usr_tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
if (!s_trace_ch.hw) {
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
if (!s_inited) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
if (!s_trace_ch.hw->flush_up_buffer) {
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
if (ch->hw->flush_up_buffer == NULL) {
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, usr_tmo);
|
||||
|
||||
return s_trace_ch.hw->flush_up_buffer(s_trace_ch.hw_data, &tmo);
|
||||
return ch->hw->flush_up_buffer(ch->hw_data, &tmo);
|
||||
}
|
||||
|
||||
bool esp_apptrace_host_is_connected(void)
|
||||
bool esp_apptrace_host_is_connected(esp_apptrace_dest_t dest)
|
||||
{
|
||||
esp_apptrace_channel_t *ch;
|
||||
|
||||
ESP_APPTRACE_LOGV("%s(): enter", __func__);
|
||||
|
||||
if (!s_trace_ch.hw) {
|
||||
if (dest >= ESP_APPTRACE_DEST_MAX) {
|
||||
return false;
|
||||
}
|
||||
if (!s_trace_ch.hw->host_is_connected) {
|
||||
if (!s_inited) {
|
||||
return false;
|
||||
}
|
||||
ch = &s_trace_channels[dest];
|
||||
if (ch->hw == NULL) {
|
||||
ESP_APPTRACE_LOGE("Trace destination %d not supported!", dest);
|
||||
return false;
|
||||
}
|
||||
if (ch->hw->host_is_connected == NULL) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return s_trace_ch.hw->host_is_connected(s_trace_ch.hw_data);
|
||||
return ch->hw->host_is_connected(ch->hw_data);
|
||||
}
|
||||
|
||||
esp_apptrace_dest_t esp_apptrace_get_destination(void)
|
||||
#if !CONFIG_APPTRACE_DEST_JTAG
|
||||
esp_apptrace_hw_t *esp_apptrace_jtag_hw_get(void **data)
|
||||
{
|
||||
return s_trace_ch.dest;
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_set_header_size(esp_apptrace_header_size_t header_size)
|
||||
{
|
||||
if (!s_trace_ch.hw) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
if (s_trace_ch.hw->set_header_size) {
|
||||
s_trace_ch.hw->set_header_size(s_trace_ch.hw_data, header_size);
|
||||
}
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
/* If any trace library (sysview or external) is selected with the apptrace transport,
|
||||
* initialization will be handled by the esp_trace component
|
||||
*/
|
||||
#if CONFIG_ESP_TRACE_LIB_NONE && CONFIG_ESP_TRACE_TRANSPORT_APPTRACE
|
||||
|
||||
esp_apptrace_config_t __attribute__((weak)) esp_apptrace_get_user_params(void)
|
||||
{
|
||||
esp_apptrace_config_t default_config = APPTRACE_CONFIG_DEFAULT();
|
||||
return default_config;
|
||||
}
|
||||
|
||||
ESP_SYSTEM_INIT_FN(apptrace_early_init, SECONDARY, ESP_SYSTEM_INIT_ALL_CORES, 115)
|
||||
{
|
||||
esp_apptrace_config_t config = esp_apptrace_get_user_params();
|
||||
return esp_apptrace_init(&config);
|
||||
return NULL;
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2021-2025 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
#include <sys/param.h>
|
||||
@@ -18,16 +18,13 @@
|
||||
* In this case host SW will see that wr_sz < block_sz and will report error.
|
||||
*/
|
||||
typedef struct {
|
||||
union {
|
||||
struct {
|
||||
uint8_t block_sz_8;
|
||||
uint8_t wr_sz_8;
|
||||
};
|
||||
struct {
|
||||
uint16_t block_sz_16;
|
||||
uint16_t wr_sz_16;
|
||||
};
|
||||
};
|
||||
#if CONFIG_APPTRACE_SV_ENABLE
|
||||
uint8_t block_sz; // size of allocated block for user data
|
||||
uint8_t wr_sz; // size of actually written data
|
||||
#else
|
||||
uint16_t block_sz; // size of allocated block for user data
|
||||
uint16_t wr_sz; // size of actually written data
|
||||
#endif
|
||||
} esp_tracedata_hdr_t;
|
||||
|
||||
/** TODO: docs
|
||||
@@ -36,29 +33,41 @@ typedef struct {
|
||||
uint16_t block_sz; // size of allocated block for user data
|
||||
} esp_hostdata_hdr_t;
|
||||
|
||||
#define ESP_APPTRACE_INBLOCK_MARKER(_hw_data_) \
|
||||
((_hw_data_)->state.markers[(_hw_data_)->state.in_block % 2])
|
||||
#if CONFIG_APPTRACE_SV_ENABLE
|
||||
#define ESP_APPTRACE_USR_BLOCK_CORE(_cid_) (0)
|
||||
#define ESP_APPTRACE_USR_BLOCK_LEN(_v_) (_v_)
|
||||
#define ESP_APPTRACE_USR_DATA_LEN_MAX(_hw_data_) 255UL
|
||||
#else
|
||||
#define ESP_APPTRACE_USR_BLOCK_CORE(_cid_) ((_cid_) << 15)
|
||||
#define ESP_APPTRACE_USR_BLOCK_LEN(_v_) (~(1 << 15) & (_v_))
|
||||
#define ESP_APPTRACE_USR_DATA_LEN_MAX(_hw_data_) (ESP_APPTRACE_INBLOCK(_hw_data_)->sz - sizeof(esp_tracedata_hdr_t))
|
||||
#endif
|
||||
#define ESP_APPTRACE_USR_BLOCK_RAW_SZ(_s_) ((_s_) + sizeof(esp_tracedata_hdr_t))
|
||||
|
||||
#define ESP_APPTRACE_INBLOCK(_hw_data_) \
|
||||
(&(_hw_data_)->blocks[(_hw_data_)->state.in_block % 2])
|
||||
#define ESP_APPTRACE_INBLOCK_MARKER(_hw_data_) ((_hw_data_)->state.markers[(_hw_data_)->state.in_block % 2])
|
||||
#define ESP_APPTRACE_INBLOCK_MARKER_UPD(_hw_data_, _v_) do {(_hw_data_)->state.markers[(_hw_data_)->state.in_block % 2] += (_v_);}while(0)
|
||||
#define ESP_APPTRACE_INBLOCK(_hw_data_) (&(_hw_data_)->blocks[(_hw_data_)->state.in_block % 2])
|
||||
|
||||
const static char *TAG = "esp_apptrace";
|
||||
|
||||
static uint32_t esp_apptrace_membufs_down_buffer_write_nolock(esp_apptrace_membufs_proto_data_t *proto,
|
||||
uint8_t *data, uint32_t size);
|
||||
static uint32_t esp_apptrace_membufs_down_buffer_write_nolock(esp_apptrace_membufs_proto_data_t *proto, uint8_t *data, uint32_t size);
|
||||
|
||||
esp_err_t esp_apptrace_membufs_init(esp_apptrace_membufs_proto_data_t *proto,
|
||||
const esp_apptrace_mem_block_t blocks_cfg[2])
|
||||
|
||||
esp_err_t esp_apptrace_membufs_init(esp_apptrace_membufs_proto_data_t *proto, const esp_apptrace_mem_block_t blocks_cfg[2])
|
||||
{
|
||||
// disabled by default
|
||||
esp_apptrace_rb_init(&proto->rb_down, NULL, 0);
|
||||
// membufs proto init
|
||||
for (unsigned int i = 0; i < 2; i++) {
|
||||
for (unsigned i = 0; i < 2; i++) {
|
||||
proto->blocks[i].start = blocks_cfg[i].start;
|
||||
proto->blocks[i].sz = blocks_cfg[i].sz;
|
||||
proto->state.markers[i] = 0;
|
||||
}
|
||||
proto->state.in_block = 0;
|
||||
#if CONFIG_APPTRACE_PENDING_DATA_SIZE_MAX > 0
|
||||
esp_apptrace_rb_init(&proto->rb_pend, proto->pending_data,
|
||||
sizeof(proto->pending_data));
|
||||
#endif
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
@@ -72,10 +81,10 @@ static esp_err_t esp_apptrace_membufs_swap(esp_apptrace_membufs_proto_data_t *pr
|
||||
{
|
||||
int prev_block_num = proto->state.in_block % 2;
|
||||
int new_block_num = prev_block_num ? (0) : (1);
|
||||
esp_err_t res = ESP_OK;
|
||||
|
||||
esp_err_t res = proto->hw->swap_start(proto->state.in_block);
|
||||
res = proto->hw->swap_start(proto->state.in_block);
|
||||
if (res != ESP_OK) {
|
||||
ESP_APPTRACE_LOGE("Failed to swap to new block: %d", res);
|
||||
return res;
|
||||
}
|
||||
|
||||
@@ -83,7 +92,7 @@ static esp_err_t esp_apptrace_membufs_swap(esp_apptrace_membufs_proto_data_t *pr
|
||||
// switch to new block
|
||||
proto->state.in_block++;
|
||||
|
||||
proto->hw->swap(new_block_num, proto->state.markers[prev_block_num]);
|
||||
proto->hw->swap(new_block_num);
|
||||
|
||||
// handle data from host
|
||||
esp_hostdata_hdr_t *hdr = (esp_hostdata_hdr_t *)proto->blocks[new_block_num].start;
|
||||
@@ -91,20 +100,41 @@ static esp_err_t esp_apptrace_membufs_swap(esp_apptrace_membufs_proto_data_t *pr
|
||||
if (proto->hw->host_data_pending() && hdr->block_sz > 0) {
|
||||
// TODO: add support for multiple blocks from host, currently there is no need for that
|
||||
uint8_t *p = proto->blocks[new_block_num].start + proto->blocks[new_block_num].sz;
|
||||
ESP_APPTRACE_LOGD("Recvd %" PRIu16 " bytes from host (@ %p) [%x %x %x %x %x %x %x %x .. %x %x %x %x %x %x %x %x]",
|
||||
hdr->block_sz, proto->blocks[new_block_num].start,
|
||||
*(proto->blocks[new_block_num].start + 0), *(proto->blocks[new_block_num].start + 1),
|
||||
*(proto->blocks[new_block_num].start + 2), *(proto->blocks[new_block_num].start + 3),
|
||||
*(proto->blocks[new_block_num].start + 4), *(proto->blocks[new_block_num].start + 5),
|
||||
*(proto->blocks[new_block_num].start + 6), *(proto->blocks[new_block_num].start + 7),
|
||||
*(p - 8), *(p - 7), *(p - 6), *(p - 5), *(p - 4), *(p - 3), *(p - 2), *(p - 1));
|
||||
uint32_t sz = esp_apptrace_membufs_down_buffer_write_nolock(proto, (uint8_t *)(hdr + 1), hdr->block_sz);
|
||||
ESP_APPTRACE_LOGD("Recvd %d bytes from host (@ 0x%x) [%x %x %x %x %x %x %x %x .. %x %x %x %x %x %x %x %x]",
|
||||
hdr->block_sz, proto->blocks[new_block_num].start,
|
||||
*(proto->blocks[new_block_num].start+0), *(proto->blocks[new_block_num].start+1),
|
||||
*(proto->blocks[new_block_num].start+2), *(proto->blocks[new_block_num].start+3),
|
||||
*(proto->blocks[new_block_num].start+4), *(proto->blocks[new_block_num].start+5),
|
||||
*(proto->blocks[new_block_num].start+6), *(proto->blocks[new_block_num].start+7),
|
||||
*(p-8), *(p-7), *(p-6), *(p-5), *(p-4), *(p-3), *(p-2), *(p-1));
|
||||
uint32_t sz = esp_apptrace_membufs_down_buffer_write_nolock(proto, (uint8_t *)(hdr+1), hdr->block_sz);
|
||||
if (sz != hdr->block_sz) {
|
||||
ESP_APPTRACE_LOGE("Failed to write %" PRIu32 " bytes to down buffer (%" PRIu16 " %" PRIu32 ")!",
|
||||
hdr->block_sz - sz, hdr->block_sz, sz);
|
||||
ESP_APPTRACE_LOGE("Failed to write %d bytes to down buffer (%d %d)!", hdr->block_sz - sz, hdr->block_sz, sz);
|
||||
}
|
||||
hdr->block_sz = 0;
|
||||
}
|
||||
#if CONFIG_APPTRACE_PENDING_DATA_SIZE_MAX > 0
|
||||
// copy pending data to block if any
|
||||
while (proto->state.markers[new_block_num] < proto->blocks[new_block_num].sz) {
|
||||
uint32_t read_sz = esp_apptrace_rb_read_size_get(&proto->rb_pend);
|
||||
if (read_sz == 0) {
|
||||
break; // no more data in pending buffer
|
||||
}
|
||||
if (read_sz > proto->blocks[new_block_num].sz - proto->state.markers[new_block_num]) {
|
||||
read_sz = proto->blocks[new_block_num].sz - proto->state.markers[new_block_num];
|
||||
}
|
||||
uint8_t *ptr = esp_apptrace_rb_consume(&proto->rb_pend, read_sz);
|
||||
if (!ptr) {
|
||||
assert(false && "Failed to consume pended bytes!!");
|
||||
break;
|
||||
}
|
||||
ESP_APPTRACE_LOGD("Pump %d pend bytes [%x %x %x %x : %x %x %x %x : %x %x %x %x : %x %x...%x %x]",
|
||||
read_sz, *(ptr+0), *(ptr+1), *(ptr+2), *(ptr+3), *(ptr+4),
|
||||
*(ptr+5), *(ptr+6), *(ptr+7), *(ptr+8), *(ptr+9), *(ptr+10), *(ptr+11), *(ptr+12), *(ptr+13), *(ptr+read_sz-2), *(ptr+read_sz-1));
|
||||
memcpy(proto->blocks[new_block_num].start + proto->state.markers[new_block_num], ptr, read_sz);
|
||||
proto->state.markers[new_block_num] += read_sz;
|
||||
}
|
||||
#endif
|
||||
proto->hw->swap_end(proto->state.in_block, proto->state.markers[prev_block_num]);
|
||||
return res;
|
||||
}
|
||||
@@ -118,24 +148,11 @@ static esp_err_t esp_apptrace_membufs_swap_waitus(esp_apptrace_membufs_proto_dat
|
||||
if (res != ESP_OK) {
|
||||
break;
|
||||
}
|
||||
#if CONFIG_IDF_TARGET_ESP32S3
|
||||
/*
|
||||
* ESP32S3 has a serious data corruption issue with the transferred data to host.
|
||||
* This delay helps reduce the failure rate by temporarily reducing heavy memory writes
|
||||
* from RTOS-level tracing and giving OpenOCD more time to read trace memory before
|
||||
* the current thread continues execution. While this doesn't completely prevent
|
||||
* memory access from other threads/cores/ISRs, it has shown to significantly improve
|
||||
* reliability when combined with CRC checks in OpenOCD. In practice, this reduces the
|
||||
* number of retries needed to read an entire block without corruption.
|
||||
*/
|
||||
esp_rom_delay_us(100);
|
||||
#endif
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
uint8_t *esp_apptrace_membufs_down_buffer_get(esp_apptrace_membufs_proto_data_t *proto,
|
||||
uint32_t *size, esp_apptrace_tmo_t *tmo)
|
||||
uint8_t *esp_apptrace_membufs_down_buffer_get(esp_apptrace_membufs_proto_data_t *proto, uint32_t *size, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
uint8_t *ptr = NULL;
|
||||
|
||||
@@ -168,21 +185,19 @@ uint8_t *esp_apptrace_membufs_down_buffer_get(esp_apptrace_membufs_proto_data_t
|
||||
return ptr;
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_membufs_down_buffer_put(esp_apptrace_membufs_proto_data_t *proto,
|
||||
uint8_t *ptr, esp_apptrace_tmo_t *tmo)
|
||||
esp_err_t esp_apptrace_membufs_down_buffer_put(esp_apptrace_membufs_proto_data_t *proto, uint8_t *ptr, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
/* nothing todo */
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
static uint32_t esp_apptrace_membufs_down_buffer_write_nolock(esp_apptrace_membufs_proto_data_t *proto,
|
||||
uint8_t *data, uint32_t size)
|
||||
static uint32_t esp_apptrace_membufs_down_buffer_write_nolock(esp_apptrace_membufs_proto_data_t *proto, uint8_t *data, uint32_t size)
|
||||
{
|
||||
uint32_t total_sz = 0;
|
||||
|
||||
while (total_sz < size) {
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_trax_down_buffer_write_nolock WRS %" PRIu32 "-%" PRIu32 "-%" PRIu32 " %" PRIu32, proto->rb_down.wr, proto->rb_down.rd,
|
||||
proto->rb_down.cur_size, size);
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_trax_down_buffer_write_nolock WRS %d-%d-%d %d", proto->rb_down.wr, proto->rb_down.rd,
|
||||
proto->rb_down.cur_size, size);
|
||||
uint32_t wr_sz = esp_apptrace_rb_write_size_get(&proto->rb_down);
|
||||
if (wr_sz == 0) {
|
||||
break;
|
||||
@@ -191,69 +206,140 @@ static uint32_t esp_apptrace_membufs_down_buffer_write_nolock(esp_apptrace_membu
|
||||
if (wr_sz > size - total_sz) {
|
||||
wr_sz = size - total_sz;
|
||||
}
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_trax_down_buffer_write_nolock wr %" PRIu32, wr_sz);
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_trax_down_buffer_write_nolock wr %d", wr_sz);
|
||||
uint8_t *ptr = esp_apptrace_rb_produce(&proto->rb_down, wr_sz);
|
||||
if (!ptr) {
|
||||
assert(false && "Failed to produce bytes to down buffer!");
|
||||
}
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_trax_down_buffer_write_nolock wr %" PRIu32 " to %p from %p", wr_sz, ptr, data + total_sz + wr_sz);
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_trax_down_buffer_write_nolock wr %d to 0x%x from 0x%x", wr_sz, ptr, data + total_sz + wr_sz);
|
||||
memcpy(ptr, data + total_sz, wr_sz);
|
||||
total_sz += wr_sz;
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_trax_down_buffer_write_nolock wr %" PRIu32 "/%" PRIu32 "", wr_sz, total_sz);
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_trax_down_buffer_write_nolock wr %d/%d", wr_sz, total_sz);
|
||||
}
|
||||
return total_sz;
|
||||
}
|
||||
|
||||
static inline uint32_t esp_apptrace_membufs_usr_data_len_max(esp_apptrace_membufs_proto_data_t *proto)
|
||||
static inline uint8_t *esp_apptrace_membufs_wait4buf(esp_apptrace_membufs_proto_data_t *proto, uint16_t size, esp_apptrace_tmo_t *tmo, int *pended)
|
||||
{
|
||||
return proto->header_size == ESP_APPTRACE_HEADER_SIZE_32 ?
|
||||
ESP_APPTRACE_INBLOCK(proto)->sz - ESP_APPTRACE_HEADER_SIZE_32 : 255;
|
||||
}
|
||||
uint8_t *ptr = NULL;
|
||||
|
||||
uint8_t *esp_apptrace_membufs_up_buffer_get(esp_apptrace_membufs_proto_data_t *proto,
|
||||
uint32_t size, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
if (size > esp_apptrace_membufs_usr_data_len_max(proto)) {
|
||||
ESP_APPTRACE_LOGE("Too large user data size %" PRIu32 "!", size);
|
||||
int res = esp_apptrace_membufs_swap_waitus(proto, tmo);
|
||||
if (res != ESP_OK) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (ESP_APPTRACE_INBLOCK_MARKER(proto) + size + proto->header_size > ESP_APPTRACE_INBLOCK(proto)->sz) {
|
||||
int res = esp_apptrace_membufs_swap_waitus(proto, tmo);
|
||||
if (res != ESP_OK) {
|
||||
return NULL;
|
||||
#if CONFIG_APPTRACE_PENDING_DATA_SIZE_MAX > 0
|
||||
// check if we still have pending data
|
||||
if (esp_apptrace_rb_read_size_get(&proto->rb_pend) > 0) {
|
||||
// if after block switch we still have pending data (not all pending data have been pumped to block)
|
||||
// alloc new pending buffer
|
||||
*pended = 1;
|
||||
ptr = esp_apptrace_rb_produce(&proto->rb_pend, size);
|
||||
if (!ptr) {
|
||||
ESP_APPTRACE_LOGE("Failed to alloc pend buf 1: w-r-s %d-%d-%d!", proto->rb_pend.wr, proto->rb_pend.rd, proto->rb_pend.cur_size);
|
||||
}
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
// update block pointers
|
||||
if (ESP_APPTRACE_INBLOCK_MARKER(proto) + size > ESP_APPTRACE_INBLOCK(proto)->sz) {
|
||||
#if CONFIG_APPTRACE_PENDING_DATA_SIZE_MAX > 0
|
||||
*pended = 1;
|
||||
ptr = esp_apptrace_rb_produce(&proto->rb_pend, size);
|
||||
if (ptr == NULL) {
|
||||
ESP_APPTRACE_LOGE("Failed to alloc pend buf 2: w-r-s %d-%d-%d!", proto->rb_pend.wr, proto->rb_pend.rd, proto->rb_pend.cur_size);
|
||||
}
|
||||
#endif
|
||||
} else {
|
||||
*pended = 0;
|
||||
ptr = ESP_APPTRACE_INBLOCK(proto)->start + ESP_APPTRACE_INBLOCK_MARKER(proto);
|
||||
}
|
||||
}
|
||||
|
||||
uint8_t *buf_ptr = ESP_APPTRACE_INBLOCK(proto)->start + ESP_APPTRACE_INBLOCK_MARKER(proto);
|
||||
// update cur block marker
|
||||
proto->state.markers[proto->state.in_block % 2] += size + proto->header_size;
|
||||
|
||||
// update header
|
||||
esp_tracedata_hdr_t *hdr = (esp_tracedata_hdr_t *)buf_ptr;
|
||||
if (proto->header_size == ESP_APPTRACE_HEADER_SIZE_32) {
|
||||
hdr->block_sz_16 = (esp_cpu_get_core_id() << 15) | size;
|
||||
hdr->wr_sz_16 = 0;
|
||||
} else {
|
||||
hdr->block_sz_8 = size;
|
||||
hdr->wr_sz_8 = 0;
|
||||
}
|
||||
ESP_APPTRACE_LOGD("Got %" PRIu32 " bytes from block", size);
|
||||
|
||||
return buf_ptr + proto->header_size;
|
||||
return ptr;
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_membufs_up_buffer_put(esp_apptrace_membufs_proto_data_t *proto,
|
||||
uint8_t *ptr, esp_apptrace_tmo_t *tmo)
|
||||
static inline uint8_t *esp_apptrace_membufs_pkt_start(uint8_t *ptr, uint16_t size)
|
||||
{
|
||||
// update header
|
||||
esp_tracedata_hdr_t *hdr = (esp_tracedata_hdr_t *)(ptr - proto->header_size);
|
||||
if (proto->header_size == ESP_APPTRACE_HEADER_SIZE_32) {
|
||||
hdr->wr_sz_16 = hdr->block_sz_16;
|
||||
} else {
|
||||
hdr->wr_sz_8 = hdr->block_sz_8;
|
||||
// it is safe to use esp_cpu_get_core_id() in macro call because arg is used only once inside it
|
||||
((esp_tracedata_hdr_t *)ptr)->block_sz = ESP_APPTRACE_USR_BLOCK_CORE(esp_cpu_get_core_id()) | size;
|
||||
((esp_tracedata_hdr_t *)ptr)->wr_sz = 0;
|
||||
return ptr + sizeof(esp_tracedata_hdr_t);
|
||||
}
|
||||
|
||||
static inline void esp_apptrace_membufs_pkt_end(uint8_t *ptr)
|
||||
{
|
||||
esp_tracedata_hdr_t *hdr = (esp_tracedata_hdr_t *)(ptr - sizeof(esp_tracedata_hdr_t));
|
||||
// update written size
|
||||
hdr->wr_sz = hdr->block_sz;
|
||||
}
|
||||
|
||||
uint8_t *esp_apptrace_membufs_up_buffer_get(esp_apptrace_membufs_proto_data_t *proto, uint32_t size, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
uint8_t *buf_ptr = NULL;
|
||||
|
||||
if (size > ESP_APPTRACE_USR_DATA_LEN_MAX(proto)) {
|
||||
ESP_APPTRACE_LOGE("Too large user data size %d!", size);
|
||||
return NULL;
|
||||
}
|
||||
// TODO: mark block as busy in order not to reuse it for other tracing calls until it is completely written
|
||||
|
||||
// check for data in the pending buffer
|
||||
#if CONFIG_APPTRACE_PENDING_DATA_SIZE_MAX > 0
|
||||
if (esp_apptrace_rb_read_size_get(&proto->rb_pend) > 0) {
|
||||
// if we have buffered data try to switch block
|
||||
esp_apptrace_membufs_swap(proto);
|
||||
// if switch was successful, part or all pended data have been copied to block
|
||||
}
|
||||
if (esp_apptrace_rb_read_size_get(&proto->rb_pend) > 0) {
|
||||
// if we have buffered data alloc new pending buffer
|
||||
ESP_APPTRACE_LOGD("Get %d bytes from PEND buffer", size);
|
||||
buf_ptr = esp_apptrace_rb_produce(&proto->rb_pend, ESP_APPTRACE_USR_BLOCK_RAW_SZ(size));
|
||||
if (buf_ptr == NULL) {
|
||||
int pended_buf;
|
||||
buf_ptr = esp_apptrace_membufs_wait4buf(proto, ESP_APPTRACE_USR_BLOCK_RAW_SZ(size), tmo, &pended_buf);
|
||||
if (buf_ptr && !pended_buf) {
|
||||
ESP_APPTRACE_LOGD("Get %d bytes from block", size);
|
||||
// update cur block marker
|
||||
ESP_APPTRACE_INBLOCK_MARKER_UPD(proto, ESP_APPTRACE_USR_BLOCK_RAW_SZ(size));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
#else
|
||||
if (1) {
|
||||
#endif
|
||||
if (ESP_APPTRACE_INBLOCK_MARKER(proto) + ESP_APPTRACE_USR_BLOCK_RAW_SZ(size) > ESP_APPTRACE_INBLOCK(proto)->sz) {
|
||||
#if CONFIG_APPTRACE_PENDING_DATA_SIZE_MAX > 0
|
||||
ESP_APPTRACE_LOGD("Block full. Get %d bytes from PEND buffer", size);
|
||||
buf_ptr = esp_apptrace_rb_produce(&proto->rb_pend, ESP_APPTRACE_USR_BLOCK_RAW_SZ(size));
|
||||
#endif
|
||||
if (buf_ptr == NULL) {
|
||||
int pended_buf;
|
||||
ESP_APPTRACE_LOGD(" full. Get %d bytes from pend buffer", size);
|
||||
buf_ptr = esp_apptrace_membufs_wait4buf(proto, ESP_APPTRACE_USR_BLOCK_RAW_SZ(size), tmo, &pended_buf);
|
||||
if (buf_ptr && !pended_buf) {
|
||||
ESP_APPTRACE_LOGD("Got %d bytes from block", size);
|
||||
// update cur block marker
|
||||
ESP_APPTRACE_INBLOCK_MARKER_UPD(proto, ESP_APPTRACE_USR_BLOCK_RAW_SZ(size));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
ESP_APPTRACE_LOGD("Get %d bytes from buffer", size);
|
||||
// fit to curr nlock
|
||||
buf_ptr = ESP_APPTRACE_INBLOCK(proto)->start + ESP_APPTRACE_INBLOCK_MARKER(proto);
|
||||
// update cur block marker
|
||||
ESP_APPTRACE_INBLOCK_MARKER_UPD(proto, ESP_APPTRACE_USR_BLOCK_RAW_SZ(size));
|
||||
}
|
||||
}
|
||||
if (buf_ptr) {
|
||||
buf_ptr = esp_apptrace_membufs_pkt_start(buf_ptr, size);
|
||||
}
|
||||
|
||||
return buf_ptr;
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_membufs_up_buffer_put(esp_apptrace_membufs_proto_data_t *proto, uint8_t *ptr, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
esp_apptrace_membufs_pkt_end(ptr);
|
||||
// TODO: mark block as busy in order not to re-use it for other tracing calls until it is completely written
|
||||
// TODO: avoid potential situation when all memory is consumed by low prio tasks which can not complete writing due to
|
||||
// higher prio tasks and the latter can not allocate buffers at all
|
||||
// this is abnormal situation can be detected on host which will receive only uncompleted buffers
|
||||
@@ -261,25 +347,23 @@ esp_err_t esp_apptrace_membufs_up_buffer_put(esp_apptrace_membufs_proto_data_t *
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_membufs_flush_nolock(esp_apptrace_membufs_proto_data_t *proto,
|
||||
uint32_t min_sz, esp_apptrace_tmo_t *tmo)
|
||||
esp_err_t esp_apptrace_membufs_flush_nolock(esp_apptrace_membufs_proto_data_t *proto, uint32_t min_sz, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
int res = ESP_OK;
|
||||
|
||||
if (ESP_APPTRACE_INBLOCK_MARKER(proto) < min_sz) {
|
||||
ESP_APPTRACE_LOGI("Ignore flush request for min %" PRIu32 " bytes. Bytes in block: %" PRIu32, min_sz, ESP_APPTRACE_INBLOCK_MARKER(proto));
|
||||
ESP_APPTRACE_LOGI("Ignore flush request for min %d bytes. Bytes in block: %d.", min_sz, ESP_APPTRACE_INBLOCK_MARKER(proto));
|
||||
return ESP_OK;
|
||||
}
|
||||
// switch block while size of data is more than min size
|
||||
// switch block while size of data (including that in pending buffer) is more than min size
|
||||
while (ESP_APPTRACE_INBLOCK_MARKER(proto) > min_sz) {
|
||||
ESP_APPTRACE_LOGD("Try to flush %" PRIu32 " bytes", ESP_APPTRACE_INBLOCK_MARKER(proto));
|
||||
ESP_APPTRACE_LOGD("Try to flush %d bytes. Wait until block switch for %lld us", ESP_APPTRACE_INBLOCK_MARKER(proto), tmo->tmo);
|
||||
res = esp_apptrace_membufs_swap_waitus(proto, tmo);
|
||||
if (res != ESP_OK) {
|
||||
if (res == ESP_ERR_TIMEOUT) {
|
||||
ESP_APPTRACE_LOGW("Failed to switch to another block in %" PRId32 " us!", (int32_t)tmo->elapsed);
|
||||
} else {
|
||||
ESP_APPTRACE_LOGE("Failed to switch to another block, res: %d", res);
|
||||
}
|
||||
if (tmo->tmo != ESP_APPTRACE_TMO_INFINITE)
|
||||
ESP_APPTRACE_LOGW("Failed to switch to another block in %lld us!", tmo->tmo);
|
||||
else
|
||||
ESP_APPTRACE_LOGE("Failed to switch to another block in %lld us!", tmo->tmo);
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2025 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
//
|
||||
#include "freertos/FreeRTOS.h"
|
||||
@@ -9,14 +9,12 @@
|
||||
#include "esp_app_trace_util.h"
|
||||
#include "sdkconfig.h"
|
||||
|
||||
#define ESP_APPTRACE_PRINT_LOCK 0
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////// Locks /////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
#if ESP_APPTRACE_PRINT_LOCK
|
||||
static esp_apptrace_lock_t s_log_lock = { .mux = portMUX_INITIALIZER_UNLOCKED };
|
||||
static esp_apptrace_lock_t s_log_lock = {.irq_stat = 0, .portmux = portMUX_INITIALIZER_UNLOCKED};
|
||||
#endif
|
||||
|
||||
int esp_apptrace_log_lock(void)
|
||||
@@ -33,7 +31,7 @@ int esp_apptrace_log_lock(void)
|
||||
|
||||
void esp_apptrace_log_unlock(void)
|
||||
{
|
||||
#if ESP_APPTRACE_PRINT_LOCK
|
||||
#if ESP_APPTRACE_PRINT_LOCK
|
||||
esp_apptrace_lock_give(&s_log_lock);
|
||||
#endif
|
||||
}
|
||||
@@ -44,7 +42,7 @@ void esp_apptrace_log_unlock(void)
|
||||
|
||||
esp_err_t esp_apptrace_tmo_check(esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
if (tmo->tmo != (int64_t) -1) {
|
||||
if (tmo->tmo != (int64_t)-1) {
|
||||
tmo->elapsed = esp_timer_get_time() - tmo->start;
|
||||
if (tmo->elapsed >= tmo->tmo) {
|
||||
return ESP_ERR_TIMEOUT;
|
||||
@@ -57,12 +55,6 @@ esp_err_t esp_apptrace_tmo_check(esp_apptrace_tmo_t *tmo)
|
||||
///////////////////////////////// LOCK ////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
void esp_apptrace_lock_init(esp_apptrace_lock_t *lock)
|
||||
{
|
||||
portMUX_INITIALIZE(&lock->mux);
|
||||
lock->int_state = 0;
|
||||
}
|
||||
|
||||
esp_err_t esp_apptrace_lock_take(esp_apptrace_lock_t *lock, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
esp_err_t ret;
|
||||
@@ -95,7 +87,7 @@ esp_err_t esp_apptrace_lock_give(esp_apptrace_lock_t *lock)
|
||||
uint8_t *esp_apptrace_rb_produce(esp_apptrace_rb_t *rb, uint32_t size)
|
||||
{
|
||||
uint8_t *ptr = rb->data + rb->wr;
|
||||
// check for available space
|
||||
// check for avalable space
|
||||
if (rb->rd <= rb->wr) {
|
||||
// |?R......W??|
|
||||
if (rb->wr + size >= rb->size) {
|
||||
|
||||
197
components/app_trace/gcov/gcov_rtio.c
Normal file
197
components/app_trace/gcov/gcov_rtio.c
Normal file
@@ -0,0 +1,197 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
// This module implements runtime file I/O API for GCOV.
|
||||
|
||||
#include <string.h>
|
||||
#include "esp_task_wdt.h"
|
||||
#include "freertos/FreeRTOS.h"
|
||||
#include "freertos/task.h"
|
||||
#include "freertos/semphr.h"
|
||||
#include "soc/timer_periph.h"
|
||||
#include "esp_app_trace.h"
|
||||
#include "esp_freertos_hooks.h"
|
||||
#include "esp_private/dbg_stubs.h"
|
||||
#include "esp_ipc.h"
|
||||
#include "hal/wdt_hal.h"
|
||||
#if CONFIG_IDF_TARGET_ESP32
|
||||
#include "esp32/rom/libc_stubs.h"
|
||||
#elif CONFIG_IDF_TARGET_ESP32S2
|
||||
#include "esp32s2/rom/libc_stubs.h"
|
||||
#endif
|
||||
|
||||
#if CONFIG_APPTRACE_GCOV_ENABLE
|
||||
|
||||
#define ESP_GCOV_DOWN_BUF_SIZE 4200
|
||||
|
||||
#include "esp_log.h"
|
||||
const static char *TAG = "esp_gcov_rtio";
|
||||
static volatile bool s_create_gcov_task = false;
|
||||
static volatile bool s_gcov_task_running = false;
|
||||
|
||||
extern void __gcov_dump(void);
|
||||
extern void __gcov_reset(void);
|
||||
|
||||
void gcov_dump_task(void *pvParameter)
|
||||
{
|
||||
int dump_result = 0;
|
||||
bool *running = (bool *)pvParameter;
|
||||
|
||||
ESP_EARLY_LOGV(TAG, "%s stack use in %d", __FUNCTION__, uxTaskGetStackHighWaterMark(NULL));
|
||||
|
||||
ESP_EARLY_LOGV(TAG, "Alloc apptrace down buf %d bytes", ESP_GCOV_DOWN_BUF_SIZE);
|
||||
void *down_buf = malloc(ESP_GCOV_DOWN_BUF_SIZE);
|
||||
if (down_buf == NULL) {
|
||||
ESP_EARLY_LOGE(TAG, "Could not allocate memory for the buffer");
|
||||
dump_result = ESP_ERR_NO_MEM;
|
||||
goto gcov_exit;
|
||||
}
|
||||
ESP_EARLY_LOGV(TAG, "Config apptrace down buf");
|
||||
esp_apptrace_down_buffer_config(down_buf, ESP_GCOV_DOWN_BUF_SIZE);
|
||||
ESP_EARLY_LOGV(TAG, "Dump data...");
|
||||
__gcov_dump();
|
||||
// reset dump status to allow incremental data accumulation
|
||||
__gcov_reset();
|
||||
free(down_buf);
|
||||
ESP_EARLY_LOGV(TAG, "Finish file transfer session");
|
||||
dump_result = esp_apptrace_fstop(ESP_APPTRACE_DEST_TRAX);
|
||||
if (dump_result != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send files transfer stop cmd (%d)!", dump_result);
|
||||
}
|
||||
|
||||
gcov_exit:
|
||||
ESP_EARLY_LOGV(TAG, "dump_result %d", dump_result);
|
||||
if (running) {
|
||||
*running = false;
|
||||
}
|
||||
|
||||
ESP_EARLY_LOGV(TAG, "%s stack use out %d", __FUNCTION__, uxTaskGetStackHighWaterMark(NULL));
|
||||
|
||||
vTaskDelete(NULL);
|
||||
}
|
||||
|
||||
void gcov_create_task(void *arg)
|
||||
{
|
||||
ESP_EARLY_LOGV(TAG, "%s", __FUNCTION__);
|
||||
xTaskCreatePinnedToCore(&gcov_dump_task, "gcov_dump_task", CONFIG_APPTRACE_GCOV_DUMP_TASK_STACK_SIZE,
|
||||
(void *)&s_gcov_task_running, configMAX_PRIORITIES - 1, NULL, 0);
|
||||
}
|
||||
|
||||
void gcov_create_task_tick_hook(void)
|
||||
{
|
||||
extern esp_err_t esp_ipc_start_gcov_from_isr(uint32_t cpu_id, esp_ipc_func_t func, void* arg);
|
||||
if (s_create_gcov_task) {
|
||||
if (esp_ipc_start_gcov_from_isr(xPortGetCoreID(), &gcov_create_task, NULL) == ESP_OK) {
|
||||
s_create_gcov_task = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Triggers gcov info dump task
|
||||
* This function is to be called by OpenOCD, not by normal user code.
|
||||
* TODO: what about interrupted flash access (when cache disabled)
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
static int esp_dbg_stub_gcov_entry(void)
|
||||
{
|
||||
/* we are in isr context here */
|
||||
s_create_gcov_task = true;
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
void gcov_rtio_init(void)
|
||||
{
|
||||
uint32_t capabilities = 0;
|
||||
ESP_EARLY_LOGV(TAG, "%s", __FUNCTION__);
|
||||
esp_dbg_stub_entry_set(ESP_DBG_STUB_ENTRY_GCOV, (uint32_t)&esp_dbg_stub_gcov_entry);
|
||||
if (esp_dbg_stub_entry_get(ESP_DBG_STUB_ENTRY_CAPABILITIES, &capabilities) == ESP_OK) {
|
||||
esp_dbg_stub_entry_set(ESP_DBG_STUB_ENTRY_CAPABILITIES, capabilities | ESP_DBG_STUB_CAP_GCOV_TASK);
|
||||
}
|
||||
esp_register_freertos_tick_hook(gcov_create_task_tick_hook);
|
||||
}
|
||||
|
||||
void esp_gcov_dump(void)
|
||||
{
|
||||
ESP_EARLY_LOGV(TAG, "%s", __FUNCTION__);
|
||||
|
||||
while (!esp_apptrace_host_is_connected(ESP_APPTRACE_DEST_TRAX)) {
|
||||
vTaskDelay(pdMS_TO_TICKS(10));
|
||||
}
|
||||
|
||||
/* We are not in isr context here. Waiting for the completion is safe */
|
||||
s_gcov_task_running = true;
|
||||
s_create_gcov_task = true;
|
||||
while (s_gcov_task_running) {
|
||||
vTaskDelay(pdMS_TO_TICKS(10));
|
||||
}
|
||||
}
|
||||
|
||||
void *gcov_rtio_fopen(const char *path, const char *mode)
|
||||
{
|
||||
ESP_EARLY_LOGV(TAG, "%s '%s' '%s'", __FUNCTION__, path, mode);
|
||||
void *f = esp_apptrace_fopen(ESP_APPTRACE_DEST_TRAX, path, mode);
|
||||
ESP_EARLY_LOGV(TAG, "%s ret %p", __FUNCTION__, f);
|
||||
return f;
|
||||
}
|
||||
|
||||
int gcov_rtio_fclose(void *stream)
|
||||
{
|
||||
ESP_EARLY_LOGV(TAG, "%s", __FUNCTION__);
|
||||
return esp_apptrace_fclose(ESP_APPTRACE_DEST_TRAX, stream);
|
||||
}
|
||||
|
||||
size_t gcov_rtio_fread(void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
{
|
||||
ESP_EARLY_LOGV(TAG, "%s read %u", __FUNCTION__, size * nmemb);
|
||||
size_t sz = esp_apptrace_fread(ESP_APPTRACE_DEST_TRAX, ptr, size, nmemb, stream);
|
||||
ESP_EARLY_LOGV(TAG, "%s actually read %u", __FUNCTION__, sz);
|
||||
return sz;
|
||||
}
|
||||
|
||||
size_t gcov_rtio_fwrite(const void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
{
|
||||
ESP_EARLY_LOGV(TAG, "%s", __FUNCTION__);
|
||||
return esp_apptrace_fwrite(ESP_APPTRACE_DEST_TRAX, ptr, size, nmemb, stream);
|
||||
}
|
||||
|
||||
int gcov_rtio_fseek(void *stream, long offset, int whence)
|
||||
{
|
||||
int ret = esp_apptrace_fseek(ESP_APPTRACE_DEST_TRAX, stream, offset, whence);
|
||||
ESP_EARLY_LOGV(TAG, "%s(%p %ld %d) = %d", __FUNCTION__, stream, offset, whence, ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
long gcov_rtio_ftell(void *stream)
|
||||
{
|
||||
long ret = esp_apptrace_ftell(ESP_APPTRACE_DEST_TRAX, stream);
|
||||
ESP_EARLY_LOGV(TAG, "%s(%p) = %ld", __FUNCTION__, stream, ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
int gcov_rtio_feof(void *stream)
|
||||
{
|
||||
int ret = esp_apptrace_feof(ESP_APPTRACE_DEST_TRAX, stream);
|
||||
ESP_EARLY_LOGV(TAG, "%s(%p) = %d", __FUNCTION__, stream, ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
void gcov_rtio_setbuf(void *arg1 __attribute__ ((unused)), void *arg2 __attribute__ ((unused)))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
/* Wrappers for Gcov functions */
|
||||
|
||||
extern void __real___gcov_init(void *info);
|
||||
void __wrap___gcov_init(void *info)
|
||||
{
|
||||
__real___gcov_init(info);
|
||||
gcov_rtio_init();
|
||||
}
|
||||
|
||||
#endif
|
||||
8
components/app_trace/gcov/io_sym.map
Normal file
8
components/app_trace/gcov/io_sym.map
Normal file
@@ -0,0 +1,8 @@
|
||||
fopen gcov_rtio_fopen
|
||||
fclose gcov_rtio_fclose
|
||||
fwrite gcov_rtio_fwrite
|
||||
fread gcov_rtio_fread
|
||||
fseek gcov_rtio_fseek
|
||||
ftell gcov_rtio_ftell
|
||||
setbuf gcov_rtio_setbuf
|
||||
feof gcov_rtio_feof
|
||||
115
components/app_trace/heap_trace_tohost.c
Normal file
115
components/app_trace/heap_trace_tohost.c
Normal file
@@ -0,0 +1,115 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2015-2022 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
#include <sdkconfig.h>
|
||||
|
||||
#define HEAP_TRACE_SRCFILE /* don't warn on inclusion here */
|
||||
#include "esp_heap_trace.h"
|
||||
#undef HEAP_TRACE_SRCFILE
|
||||
#include "esp_heap_caps.h"
|
||||
#if CONFIG_APPTRACE_SV_ENABLE
|
||||
#include "esp_app_trace.h"
|
||||
#include "esp_sysview_trace.h"
|
||||
#endif
|
||||
|
||||
#define STACK_DEPTH CONFIG_HEAP_TRACING_STACK_DEPTH
|
||||
|
||||
#ifdef CONFIG_HEAP_TRACING_TOHOST
|
||||
|
||||
#if !CONFIG_APPTRACE_SV_ENABLE
|
||||
#error None of the heap tracing backends is enabled! You must enable SystemView compatible tracing to use this feature.
|
||||
#endif
|
||||
|
||||
static bool s_tracing;
|
||||
|
||||
esp_err_t heap_trace_init_tohost(void)
|
||||
{
|
||||
if (s_tracing) {
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
esp_err_t heap_trace_start(heap_trace_mode_t mode_param)
|
||||
{
|
||||
#if CONFIG_APPTRACE_SV_ENABLE
|
||||
esp_err_t ret = esp_sysview_heap_trace_start((uint32_t)-1);
|
||||
if (ret != ESP_OK) {
|
||||
return ret;
|
||||
}
|
||||
#endif
|
||||
s_tracing = true;
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
esp_err_t heap_trace_stop(void)
|
||||
{
|
||||
esp_err_t ret = ESP_ERR_NOT_SUPPORTED;
|
||||
#if CONFIG_APPTRACE_SV_ENABLE
|
||||
ret = esp_sysview_heap_trace_stop();
|
||||
#endif
|
||||
s_tracing = false;
|
||||
return ret;
|
||||
}
|
||||
|
||||
esp_err_t heap_trace_resume(void)
|
||||
{
|
||||
return heap_trace_start(HEAP_TRACE_ALL);
|
||||
}
|
||||
|
||||
size_t heap_trace_get_count(void)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
esp_err_t heap_trace_get(size_t index, heap_trace_record_t *record)
|
||||
{
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
esp_err_t heap_trace_summary(heap_trace_summary_t *summary)
|
||||
{
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
}
|
||||
|
||||
void heap_trace_dump(void)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
void heap_trace_dump_caps(__attribute__((unused)) const uint32_t caps)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
/* Add a new allocation to the heap trace records */
|
||||
static HEAP_IRAM_ATTR void record_allocation(const heap_trace_record_t *record)
|
||||
{
|
||||
if (!s_tracing) {
|
||||
return;
|
||||
}
|
||||
#if CONFIG_APPTRACE_SV_ENABLE
|
||||
esp_sysview_heap_trace_alloc(record->address, record->size, record->alloced_by);
|
||||
#endif
|
||||
}
|
||||
|
||||
/* record a free event in the heap trace log
|
||||
|
||||
For HEAP_TRACE_ALL, this means filling in the freed_by pointer.
|
||||
For HEAP_TRACE_LEAKS, this means removing the record from the log.
|
||||
*/
|
||||
static HEAP_IRAM_ATTR void record_free(void *p, void **callers)
|
||||
{
|
||||
if (!s_tracing) {
|
||||
return;
|
||||
}
|
||||
#if CONFIG_APPTRACE_SV_ENABLE
|
||||
esp_sysview_heap_trace_free(p, callers);
|
||||
#endif
|
||||
}
|
||||
|
||||
#include "heap_trace.inc"
|
||||
|
||||
#endif /*CONFIG_HEAP_TRACING_TOHOST*/
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2025 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -17,6 +17,8 @@
|
||||
#include <string.h>
|
||||
#include "esp_app_trace.h"
|
||||
|
||||
#if CONFIG_APPTRACE_ENABLE
|
||||
|
||||
#include "esp_log.h"
|
||||
const static char *TAG = "esp_host_file_io";
|
||||
|
||||
@@ -77,13 +79,13 @@ typedef struct {
|
||||
void *file;
|
||||
} esp_apptrace_ftell_args_t;
|
||||
|
||||
static esp_err_t esp_apptrace_file_cmd_send(uint8_t cmd, void (*prep_args)(uint8_t *, void *), void *args, uint32_t args_len)
|
||||
static esp_err_t esp_apptrace_file_cmd_send(esp_apptrace_dest_t dest, uint8_t cmd, void (*prep_args)(uint8_t *, void *), void *args, uint32_t args_len)
|
||||
{
|
||||
esp_err_t ret;
|
||||
esp_apptrace_fcmd_hdr_t *hdr;
|
||||
|
||||
ESP_EARLY_LOGV(TAG, "%s %d", __func__, cmd);
|
||||
uint8_t *ptr = esp_apptrace_buffer_get(sizeof(*hdr) + args_len, ESP_APPTRACE_TMO_INFINITE); //TODO: finite tmo
|
||||
uint8_t *ptr = esp_apptrace_buffer_get(dest, sizeof(*hdr) + args_len, ESP_APPTRACE_TMO_INFINITE); //TODO: finite tmo
|
||||
if (ptr == NULL) {
|
||||
return ESP_ERR_NO_MEM;
|
||||
}
|
||||
@@ -95,13 +97,13 @@ static esp_err_t esp_apptrace_file_cmd_send(uint8_t cmd, void (*prep_args)(uint8
|
||||
}
|
||||
|
||||
// now indicate that this buffer is ready to be sent off to host
|
||||
ret = esp_apptrace_buffer_put(ptr, ESP_APPTRACE_TMO_INFINITE);//TODO: finite tmo
|
||||
ret = esp_apptrace_buffer_put(dest, ptr, ESP_APPTRACE_TMO_INFINITE);//TODO: finite tmo
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to put apptrace buffer (%d)!", ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
ret = esp_apptrace_flush(ESP_APPTRACE_TMO_INFINITE);//TODO: finite tmo
|
||||
ret = esp_apptrace_flush(dest, ESP_APPTRACE_TMO_INFINITE);//TODO: finite tmo
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to flush apptrace buffer (%d)!", ret);
|
||||
return ret;
|
||||
@@ -110,17 +112,17 @@ static esp_err_t esp_apptrace_file_cmd_send(uint8_t cmd, void (*prep_args)(uint8
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_file_rsp_recv(uint8_t *buf, uint32_t buf_len)
|
||||
static esp_err_t esp_apptrace_file_rsp_recv(esp_apptrace_dest_t dest, uint8_t *buf, uint32_t buf_len)
|
||||
{
|
||||
uint32_t tot_rd = 0;
|
||||
while (tot_rd < buf_len) {
|
||||
uint32_t rd_size = buf_len - tot_rd;
|
||||
esp_err_t ret = esp_apptrace_read(buf + tot_rd, &rd_size, ESP_APPTRACE_TMO_INFINITE); //TODO: finite tmo
|
||||
esp_err_t ret = esp_apptrace_read(dest, buf + tot_rd, &rd_size, ESP_APPTRACE_TMO_INFINITE); //TODO: finite tmo
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read (%d)!", ret);
|
||||
return ret;
|
||||
}
|
||||
ESP_EARLY_LOGV(TAG, "%s read %" PRIu32 " bytes", __FUNCTION__, rd_size);
|
||||
ESP_EARLY_LOGV(TAG, "%s read %d bytes", __FUNCTION__, rd_size);
|
||||
tot_rd += rd_size;
|
||||
}
|
||||
|
||||
@@ -135,7 +137,7 @@ static void esp_apptrace_fopen_args_prepare(uint8_t *buf, void *priv)
|
||||
memcpy(buf + args->path_len, args->mode, args->mode_len);
|
||||
}
|
||||
|
||||
void *esp_apptrace_fopen(const char *path, const char *mode)
|
||||
void *esp_apptrace_fopen(esp_apptrace_dest_t dest, const char *path, const char *mode)
|
||||
{
|
||||
esp_apptrace_fopen_args_t cmd_args;
|
||||
|
||||
@@ -149,8 +151,8 @@ void *esp_apptrace_fopen(const char *path, const char *mode)
|
||||
cmd_args.mode = mode;
|
||||
cmd_args.mode_len = strlen(mode) + 1;
|
||||
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(ESP_APPTRACE_FILE_CMD_FOPEN, esp_apptrace_fopen_args_prepare,
|
||||
&cmd_args, cmd_args.path_len + cmd_args.mode_len);
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FOPEN, esp_apptrace_fopen_args_prepare,
|
||||
&cmd_args, cmd_args.path_len+cmd_args.mode_len);
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
|
||||
return NULL;
|
||||
@@ -158,7 +160,7 @@ void *esp_apptrace_fopen(const char *path, const char *mode)
|
||||
|
||||
// now read the answer
|
||||
void *resp;
|
||||
ret = esp_apptrace_file_rsp_recv((uint8_t *)&resp, sizeof(resp));
|
||||
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return NULL;
|
||||
@@ -174,13 +176,13 @@ static void esp_apptrace_fclose_args_prepare(uint8_t *buf, void *priv)
|
||||
memcpy(buf, &args->file, sizeof(args->file));
|
||||
}
|
||||
|
||||
int esp_apptrace_fclose(void *stream)
|
||||
int esp_apptrace_fclose(esp_apptrace_dest_t dest, void *stream)
|
||||
{
|
||||
esp_apptrace_fclose_args_t cmd_args;
|
||||
|
||||
cmd_args.file = stream;
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(ESP_APPTRACE_FILE_CMD_FCLOSE, esp_apptrace_fclose_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FCLOSE, esp_apptrace_fclose_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
|
||||
return EOF;
|
||||
@@ -188,7 +190,7 @@ int esp_apptrace_fclose(void *stream)
|
||||
|
||||
// now read the answer
|
||||
int resp;
|
||||
ret = esp_apptrace_file_rsp_recv((uint8_t *)&resp, sizeof(resp));
|
||||
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return EOF;
|
||||
@@ -205,11 +207,11 @@ static void esp_apptrace_fwrite_args_prepare(uint8_t *buf, void *priv)
|
||||
memcpy(buf + sizeof(args->file), args->buf, args->size);
|
||||
}
|
||||
|
||||
size_t esp_apptrace_fwrite(const void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
size_t esp_apptrace_fwrite(esp_apptrace_dest_t dest, const void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
{
|
||||
esp_apptrace_fwrite_args_t cmd_args;
|
||||
|
||||
ESP_EARLY_LOGV(TAG, "esp_apptrace_fwrite f %p l %d", stream, size * nmemb);
|
||||
ESP_EARLY_LOGV(TAG, "esp_apptrace_fwrite f %p l %d", stream, size*nmemb);
|
||||
|
||||
if (ptr == NULL) {
|
||||
return 0;
|
||||
@@ -218,8 +220,8 @@ size_t esp_apptrace_fwrite(const void *ptr, size_t size, size_t nmemb, void *str
|
||||
cmd_args.buf = (void *)ptr;
|
||||
cmd_args.size = size * nmemb;
|
||||
cmd_args.file = stream;
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(ESP_APPTRACE_FILE_CMD_FWRITE, esp_apptrace_fwrite_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args.file) + cmd_args.size);
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FWRITE, esp_apptrace_fwrite_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args.file)+cmd_args.size);
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
|
||||
return 0;
|
||||
@@ -227,7 +229,7 @@ size_t esp_apptrace_fwrite(const void *ptr, size_t size, size_t nmemb, void *str
|
||||
|
||||
// now read the answer
|
||||
size_t resp;
|
||||
ret = esp_apptrace_file_rsp_recv((uint8_t *)&resp, sizeof(resp));
|
||||
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return 0;
|
||||
@@ -247,11 +249,11 @@ static void esp_apptrace_fread_args_prepare(uint8_t *buf, void *priv)
|
||||
memcpy(buf + sizeof(args->file), &args->size, sizeof(args->size));
|
||||
}
|
||||
|
||||
size_t esp_apptrace_fread(void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
size_t esp_apptrace_fread(esp_apptrace_dest_t dest, void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
{
|
||||
esp_apptrace_fread_args_t cmd_args;
|
||||
|
||||
ESP_EARLY_LOGV(TAG, "esp_apptrace_fread f %p l %d", stream, size * nmemb);
|
||||
ESP_EARLY_LOGV(TAG, "esp_apptrace_fread f %p l %d", stream, size*nmemb);
|
||||
|
||||
if (ptr == NULL) {
|
||||
return 0;
|
||||
@@ -259,8 +261,8 @@ size_t esp_apptrace_fread(void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
|
||||
cmd_args.size = size * nmemb;
|
||||
cmd_args.file = stream;
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(ESP_APPTRACE_FILE_CMD_FREAD, esp_apptrace_fread_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FREAD, esp_apptrace_fread_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
|
||||
return 0;
|
||||
@@ -268,7 +270,7 @@ size_t esp_apptrace_fread(void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
|
||||
// now read the answer
|
||||
size_t resp;
|
||||
ret = esp_apptrace_file_rsp_recv((uint8_t *)&resp, sizeof(resp));
|
||||
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return 0;
|
||||
@@ -277,7 +279,7 @@ size_t esp_apptrace_fread(void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
return 0;
|
||||
}
|
||||
|
||||
ret = esp_apptrace_file_rsp_recv(ptr, resp);
|
||||
ret = esp_apptrace_file_rsp_recv(dest, ptr, resp);
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read file data (%d)!", ret);
|
||||
return 0;
|
||||
@@ -286,7 +288,7 @@ size_t esp_apptrace_fread(void *ptr, size_t size, size_t nmemb, void *stream)
|
||||
* fread(buf, 1 ,size, file);
|
||||
* So, total read bytes count returns
|
||||
*/
|
||||
return resp / size; // return the number of items read
|
||||
return resp/size; // return the number of items read
|
||||
}
|
||||
|
||||
static void esp_apptrace_fseek_args_prepare(uint8_t *buf, void *priv)
|
||||
@@ -298,7 +300,7 @@ static void esp_apptrace_fseek_args_prepare(uint8_t *buf, void *priv)
|
||||
memcpy(buf + sizeof(args->file) + sizeof(args->offset), &args->whence, sizeof(args->whence));
|
||||
}
|
||||
|
||||
int esp_apptrace_fseek(void *stream, long offset, int whence)
|
||||
int esp_apptrace_fseek(esp_apptrace_dest_t dest, void *stream, long offset, int whence)
|
||||
{
|
||||
esp_apptrace_fseek_args_t cmd_args;
|
||||
|
||||
@@ -307,8 +309,8 @@ int esp_apptrace_fseek(void *stream, long offset, int whence)
|
||||
cmd_args.file = stream;
|
||||
cmd_args.offset = offset;
|
||||
cmd_args.whence = whence;
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(ESP_APPTRACE_FILE_CMD_FSEEK, esp_apptrace_fseek_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FSEEK, esp_apptrace_fseek_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
|
||||
return -1;
|
||||
@@ -316,7 +318,7 @@ int esp_apptrace_fseek(void *stream, long offset, int whence)
|
||||
|
||||
// now read the answer
|
||||
int resp;
|
||||
ret = esp_apptrace_file_rsp_recv((uint8_t *)&resp, sizeof(resp));
|
||||
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return -1;
|
||||
@@ -332,13 +334,13 @@ static void esp_apptrace_ftell_args_prepare(uint8_t *buf, void *priv)
|
||||
memcpy(buf, &args->file, sizeof(args->file));
|
||||
}
|
||||
|
||||
int esp_apptrace_ftell(void *stream)
|
||||
int esp_apptrace_ftell(esp_apptrace_dest_t dest, void *stream)
|
||||
{
|
||||
esp_apptrace_ftell_args_t cmd_args;
|
||||
|
||||
cmd_args.file = stream;
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(ESP_APPTRACE_FILE_CMD_FTELL, esp_apptrace_ftell_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FTELL, esp_apptrace_ftell_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
|
||||
return -1;
|
||||
@@ -346,7 +348,7 @@ int esp_apptrace_ftell(void *stream)
|
||||
|
||||
// now read the answer
|
||||
int resp;
|
||||
ret = esp_apptrace_file_rsp_recv((uint8_t *)&resp, sizeof(resp));
|
||||
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return -1;
|
||||
@@ -355,10 +357,10 @@ int esp_apptrace_ftell(void *stream)
|
||||
return resp;
|
||||
}
|
||||
|
||||
int esp_apptrace_fstop(void)
|
||||
int esp_apptrace_fstop(esp_apptrace_dest_t dest)
|
||||
{
|
||||
ESP_EARLY_LOGV(TAG, "%s", __func__);
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(ESP_APPTRACE_FILE_CMD_STOP, NULL, NULL, 0);
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_STOP, NULL, NULL, 0);
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send files transfer stop cmd (%d)!", ret);
|
||||
}
|
||||
@@ -372,13 +374,13 @@ static void esp_apptrace_feof_args_prepare(uint8_t *buf, void *priv)
|
||||
memcpy(buf, &args->file, sizeof(args->file));
|
||||
}
|
||||
|
||||
int esp_apptrace_feof(void *stream)
|
||||
int esp_apptrace_feof(esp_apptrace_dest_t dest, void *stream)
|
||||
{
|
||||
esp_apptrace_feof_args_t cmd_args;
|
||||
|
||||
cmd_args.file = stream;
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(ESP_APPTRACE_FILE_CMD_FEOF, esp_apptrace_feof_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FEOF, esp_apptrace_feof_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
|
||||
return EOF;
|
||||
@@ -386,7 +388,7 @@ int esp_apptrace_feof(void *stream)
|
||||
|
||||
// now read the answer
|
||||
int resp;
|
||||
ret = esp_apptrace_file_rsp_recv((uint8_t *)&resp, sizeof(resp));
|
||||
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return EOF;
|
||||
@@ -394,3 +396,5 @@ int esp_apptrace_feof(void *stream)
|
||||
|
||||
return resp;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2025 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -8,7 +8,6 @@
|
||||
|
||||
#include <stdarg.h>
|
||||
#include "esp_err.h"
|
||||
#include "esp_app_trace_config.h"
|
||||
#include "esp_app_trace_util.h" // ESP_APPTRACE_TMO_INFINITE
|
||||
|
||||
#ifdef __cplusplus
|
||||
@@ -16,70 +15,82 @@ extern "C" {
|
||||
#endif
|
||||
|
||||
/**
|
||||
* @brief Initializes application tracing module for the selected destination and configuration.
|
||||
* Application trace data destinations bits.
|
||||
*/
|
||||
typedef enum {
|
||||
ESP_APPTRACE_DEST_JTAG = 1, ///< JTAG destination
|
||||
ESP_APPTRACE_DEST_TRAX = ESP_APPTRACE_DEST_JTAG, ///< xxx_TRAX name is obsolete, use more common xxx_JTAG
|
||||
ESP_APPTRACE_DEST_UART, ///< UART destination
|
||||
ESP_APPTRACE_DEST_MAX = ESP_APPTRACE_DEST_UART+1,
|
||||
ESP_APPTRACE_DEST_NUM
|
||||
} esp_apptrace_dest_t;
|
||||
|
||||
/**
|
||||
* @brief Initializes application tracing module.
|
||||
*
|
||||
* @note Should be called before any esp_apptrace_xxx call.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_init(const esp_apptrace_config_t *config);
|
||||
esp_err_t esp_apptrace_init(void);
|
||||
|
||||
/**
|
||||
* @brief Configures down buffer.
|
||||
*
|
||||
* @note Needs to be called before attempting to receive any data using esp_apptrace_down_buffer_get and
|
||||
* esp_apptrace_read. This function does not protect internal data by lock.
|
||||
* @note Needs to be called before attempting to receive any data using esp_apptrace_down_buffer_get and esp_apptrace_read.
|
||||
* This function does not protect internal data by lock.
|
||||
*
|
||||
* @param buf Address of buffer to use for down channel (host to target) data.
|
||||
* @param size Size of the buffer.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_down_buffer_config(uint8_t *buf, uint32_t size);
|
||||
void esp_apptrace_down_buffer_config(uint8_t *buf, uint32_t size);
|
||||
|
||||
/**
|
||||
* @brief Allocates buffer for trace data.
|
||||
* Once the data in the buffer is ready to be sent, esp_apptrace_buffer_put must be called to indicate it.
|
||||
*
|
||||
* @param dest Indicates HW interface to send data.
|
||||
* @param size Size of data to write to trace buffer.
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
*
|
||||
* @return non-NULL on success, otherwise NULL.
|
||||
*/
|
||||
uint8_t *esp_apptrace_buffer_get(uint32_t size, uint32_t tmo);
|
||||
uint8_t *esp_apptrace_buffer_get(esp_apptrace_dest_t dest, uint32_t size, uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief Indicates that the data in the buffer is ready to be sent.
|
||||
* This function is a counterpart of and must be preceded by esp_apptrace_buffer_get.
|
||||
*
|
||||
* @param dest Indicates HW interface to send data. Should be identical to the same parameter in call to esp_apptrace_buffer_get.
|
||||
* @param ptr Address of trace buffer to release. Should be the value returned by call to esp_apptrace_buffer_get.
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_buffer_put(uint8_t *ptr, uint32_t tmo);
|
||||
esp_err_t esp_apptrace_buffer_put(esp_apptrace_dest_t dest, uint8_t *ptr, uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief Writes data to trace buffer.
|
||||
*
|
||||
* @param dest Indicates HW interface to send data.
|
||||
* @param data Address of data to write to trace buffer.
|
||||
* @param size Size of data to write to trace buffer.
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_write(const void *data, uint32_t size, uint32_t tmo);
|
||||
esp_err_t esp_apptrace_write(esp_apptrace_dest_t dest, const void *data, uint32_t size, uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief vprintf-like function to send log messages to host via specified HW interface.
|
||||
*
|
||||
* @param dest Indicates HW interface to send data.
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
* @param fmt Address of format string.
|
||||
* @param ap List of arguments.
|
||||
*
|
||||
* @return Number of bytes written.
|
||||
*/
|
||||
int esp_apptrace_vprintf_to(uint32_t tmo, const char *fmt, va_list ap);
|
||||
int esp_apptrace_vprintf_to(esp_apptrace_dest_t dest, uint32_t tmo, const char *fmt, va_list ap);
|
||||
|
||||
/**
|
||||
* @brief vprintf-like function to send log messages to host.
|
||||
@@ -94,198 +105,172 @@ int esp_apptrace_vprintf(const char *fmt, va_list ap);
|
||||
/**
|
||||
* @brief Flushes remaining data in trace buffer to host.
|
||||
*
|
||||
* @param dest Indicates HW interface to flush data on.
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_flush(uint32_t tmo);
|
||||
esp_err_t esp_apptrace_flush(esp_apptrace_dest_t dest, uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief Flushes remaining data in trace buffer to host without locking internal data.
|
||||
* This is a special version of esp_apptrace_flush which should be called from panic handler.
|
||||
*
|
||||
* @param min_sz Threshold for flushing data. If current filling level is above this value, data will be flushed. JTAG destinations only.
|
||||
* @param dest Indicates HW interface to flush data on.
|
||||
* @param min_sz Threshold for flushing data. If current filling level is above this value, data will be flushed. TRAX destinations only.
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_flush_nolock(uint32_t min_sz, uint32_t tmo);
|
||||
esp_err_t esp_apptrace_flush_nolock(esp_apptrace_dest_t dest, uint32_t min_sz, uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief Reads host data from trace buffer.
|
||||
*
|
||||
* @param dest Indicates HW interface to read the data on.
|
||||
* @param data Address of buffer to put data from trace buffer.
|
||||
* @param size Pointer to store size of read data. Before call to this function pointed memory must hold requested size of data
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_read(void *data, uint32_t *size, uint32_t tmo);
|
||||
esp_err_t esp_apptrace_read(esp_apptrace_dest_t dest, void *data, uint32_t *size, uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief Retrieves incoming data buffer if any.
|
||||
* Once data in the buffer is processed, esp_apptrace_down_buffer_put must be called to indicate it.
|
||||
*
|
||||
* @param dest Indicates HW interface to receive data.
|
||||
* @param size Address to store size of available data in down buffer. Must be initialized with requested value.
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
*
|
||||
* @return non-NULL on success, otherwise NULL.
|
||||
*/
|
||||
uint8_t *esp_apptrace_down_buffer_get(uint32_t *size, uint32_t tmo);
|
||||
uint8_t *esp_apptrace_down_buffer_get(esp_apptrace_dest_t dest, uint32_t *size, uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief Indicates that the data in the down buffer is processed.
|
||||
* This function is a counterpart of and must be preceded by esp_apptrace_down_buffer_get.
|
||||
*
|
||||
* @param dest Indicates HW interface to receive data. Should be identical to the same parameter in call to esp_apptrace_down_buffer_get.
|
||||
* @param ptr Address of trace buffer to release. Should be the value returned by call to esp_apptrace_down_buffer_get.
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_down_buffer_put(uint8_t *ptr, uint32_t tmo);
|
||||
esp_err_t esp_apptrace_down_buffer_put(esp_apptrace_dest_t dest, uint8_t *ptr, uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief Checks whether host is connected.
|
||||
*
|
||||
* @param dest Indicates HW interface to use.
|
||||
*
|
||||
* @return true if host is connected, otherwise false
|
||||
*/
|
||||
bool esp_apptrace_host_is_connected(void);
|
||||
|
||||
/**
|
||||
* @brief Gets the destination of the application trace.
|
||||
*
|
||||
* @return The destination of the application trace.
|
||||
*/
|
||||
esp_apptrace_dest_t esp_apptrace_get_destination(void);
|
||||
|
||||
/**
|
||||
* @brief Sets the header size of the application trace packet.
|
||||
*
|
||||
* @param header_size The header size to set.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_apptrace_set_header_size(esp_apptrace_header_size_t header_size);
|
||||
bool esp_apptrace_host_is_connected(esp_apptrace_dest_t dest);
|
||||
|
||||
/**
|
||||
* @brief Opens file on host.
|
||||
* This function has the same semantic as 'fopen' except for the first argument.
|
||||
* This function has the same semantic as 'fopen' except for the first argument.
|
||||
*
|
||||
* @param dest Indicates HW interface to use.
|
||||
* @param path Path to file.
|
||||
* @param mode Mode string. See fopen for details.
|
||||
*
|
||||
* @return non zero file handle on success, otherwise 0
|
||||
*/
|
||||
void *esp_apptrace_fopen(const char *path, const char *mode);
|
||||
void *esp_apptrace_fopen(esp_apptrace_dest_t dest, const char *path, const char *mode);
|
||||
|
||||
/**
|
||||
* @brief Closes file on host.
|
||||
* This function has the same semantic as 'fclose' except for the first argument.
|
||||
* This function has the same semantic as 'fclose' except for the first argument.
|
||||
*
|
||||
* @param dest Indicates HW interface to use.
|
||||
* @param stream File handle returned by esp_apptrace_fopen.
|
||||
*
|
||||
* @return Zero on success, otherwise non-zero. See fclose for details.
|
||||
*/
|
||||
int esp_apptrace_fclose(void *stream);
|
||||
int esp_apptrace_fclose(esp_apptrace_dest_t dest, void *stream);
|
||||
|
||||
/**
|
||||
* @brief Writes to file on host.
|
||||
* This function has the same semantic as 'fwrite' except for the first argument.
|
||||
* This function has the same semantic as 'fwrite' except for the first argument.
|
||||
*
|
||||
* @param ptr Address of data to write.
|
||||
* @param size Size of an item.
|
||||
* @param dest Indicates HW interface to use.
|
||||
* @param ptr Address of data to write.
|
||||
* @param size Size of an item.
|
||||
* @param nmemb Number of items to write.
|
||||
* @param stream File handle returned by esp_apptrace_fopen.
|
||||
*
|
||||
* @return Number of written items. See fwrite for details.
|
||||
*/
|
||||
size_t esp_apptrace_fwrite(const void *ptr, size_t size, size_t nmemb, void *stream);
|
||||
size_t esp_apptrace_fwrite(esp_apptrace_dest_t dest, const void *ptr, size_t size, size_t nmemb, void *stream);
|
||||
|
||||
/**
|
||||
* @brief Read file on host.
|
||||
* This function has the same semantic as 'fread' except for the first argument.
|
||||
* This function has the same semantic as 'fread' except for the first argument.
|
||||
*
|
||||
* @param ptr Address to store read data.
|
||||
* @param size Size of an item.
|
||||
* @param dest Indicates HW interface to use.
|
||||
* @param ptr Address to store read data.
|
||||
* @param size Size of an item.
|
||||
* @param nmemb Number of items to read.
|
||||
* @param stream File handle returned by esp_apptrace_fopen.
|
||||
*
|
||||
* @return Number of read items. See fread for details.
|
||||
*/
|
||||
size_t esp_apptrace_fread(void *ptr, size_t size, size_t nmemb, void *stream);
|
||||
size_t esp_apptrace_fread(esp_apptrace_dest_t dest, void *ptr, size_t size, size_t nmemb, void *stream);
|
||||
|
||||
/**
|
||||
* @brief Set position indicator in file on host.
|
||||
* This function has the same semantic as 'fseek' except for the first argument.
|
||||
* This function has the same semantic as 'fseek' except for the first argument.
|
||||
*
|
||||
* @param dest Indicates HW interface to use.
|
||||
* @param stream File handle returned by esp_apptrace_fopen.
|
||||
* @param offset Offset. See fseek for details.
|
||||
* @param whence Position in file. See fseek for details.
|
||||
*
|
||||
* @return Zero on success, otherwise non-zero. See fseek for details.
|
||||
*/
|
||||
int esp_apptrace_fseek(void *stream, long offset, int whence);
|
||||
int esp_apptrace_fseek(esp_apptrace_dest_t dest, void *stream, long offset, int whence);
|
||||
|
||||
/**
|
||||
* @brief Get current position indicator for file on host.
|
||||
* This function has the same semantic as 'ftell' except for the first argument.
|
||||
* This function has the same semantic as 'ftell' except for the first argument.
|
||||
*
|
||||
* @param dest Indicates HW interface to use.
|
||||
* @param stream File handle returned by esp_apptrace_fopen.
|
||||
*
|
||||
* @return Current position in file. See ftell for details.
|
||||
*/
|
||||
int esp_apptrace_ftell(void *stream);
|
||||
int esp_apptrace_ftell(esp_apptrace_dest_t dest, void *stream);
|
||||
|
||||
/**
|
||||
* @brief Indicates to the host that all file operations are complete.
|
||||
* This function should be called after all file operations are finished and
|
||||
* indicate to the host that it can perform cleanup operations (close open files etc.).
|
||||
* This function should be called after all file operations are finished and
|
||||
* indicate to the host that it can perform cleanup operations (close open files etc.).
|
||||
*
|
||||
* @param dest Indicates HW interface to use.
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
int esp_apptrace_fstop(void);
|
||||
int esp_apptrace_fstop(esp_apptrace_dest_t dest);
|
||||
|
||||
/**
|
||||
* @brief Test end-of-file indicator on a stream.
|
||||
* This function has the same semantic as 'feof' except for the first argument.
|
||||
* This function has the same semantic as 'feof' except for the first argument.
|
||||
*
|
||||
* @param dest Indicates HW interface to use.
|
||||
* @param stream File handle returned by esp_apptrace_fopen.
|
||||
*
|
||||
* @return Non-Zero if end-of-file indicator is set for stream. See feof for details.
|
||||
*/
|
||||
int esp_apptrace_feof(void *stream);
|
||||
int esp_apptrace_feof(esp_apptrace_dest_t dest, void *stream);
|
||||
|
||||
#if !CONFIG_APPTRACE_DEST_UART // JTAG or NONE
|
||||
#define APPTRACE_JTAG_CONFIG_DEFAULT() { \
|
||||
.dest = ESP_APPTRACE_DEST_JTAG, \
|
||||
.dest_cfg.jtag = {0}, \
|
||||
.flush_tmo = CONFIG_APPTRACE_ONPANIC_HOST_FLUSH_TMO, \
|
||||
.flush_thresh = CONFIG_APPTRACE_POSTMORTEM_FLUSH_THRESH, \
|
||||
}
|
||||
#endif
|
||||
|
||||
#if !CONFIG_APPTRACE_DEST_JTAG // UART or NONE
|
||||
#define APPTRACE_UART_CONFIG_DEFAULT() { \
|
||||
.dest = ESP_APPTRACE_DEST_UART, \
|
||||
.dest_cfg.uart = { \
|
||||
.uart_num = CONFIG_APPTRACE_DEST_UART_NUM, \
|
||||
.tx_pin_num = CONFIG_APPTRACE_UART_TX_GPIO, \
|
||||
.rx_pin_num = CONFIG_APPTRACE_UART_RX_GPIO, \
|
||||
.baud_rate = CONFIG_APPTRACE_UART_BAUDRATE, \
|
||||
.tx_buff_size = CONFIG_APPTRACE_UART_TX_BUFF_SIZE, \
|
||||
.tx_msg_size = CONFIG_APPTRACE_UART_TX_MSG_SIZE, \
|
||||
}, \
|
||||
.flush_tmo = CONFIG_APPTRACE_ONPANIC_HOST_FLUSH_TMO, \
|
||||
.flush_thresh = CONFIG_APPTRACE_POSTMORTEM_FLUSH_THRESH, \
|
||||
}
|
||||
#endif
|
||||
|
||||
// Default picks JTAG if available, otherwise UART
|
||||
#if !CONFIG_APPTRACE_DEST_UART
|
||||
#define APPTRACE_CONFIG_DEFAULT() APPTRACE_JTAG_CONFIG_DEFAULT()
|
||||
#else
|
||||
#define APPTRACE_CONFIG_DEFAULT() APPTRACE_UART_CONFIG_DEFAULT()
|
||||
#endif
|
||||
/**
|
||||
* @brief Triggers gcov info dump.
|
||||
* This function waits for the host to connect to target before dumping data.
|
||||
*/
|
||||
void esp_gcov_dump(void);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
#ifndef ESP_APP_TRACE_CONFIG_H_
|
||||
#define ESP_APP_TRACE_CONFIG_H_
|
||||
|
||||
#include "sdkconfig.h"
|
||||
|
||||
/* Default configurations for runtime selection (APPTRACE_DEST_ALL)
|
||||
* These values are used when building with both JTAG and UART enabled
|
||||
* to allow runtime selection. You can switch between destinations
|
||||
* via esp_apptrace_get_user_params(). If this function is
|
||||
* not provided by the application, JTAG is used by default with the
|
||||
* configuration defined below. See esp_app_trace.h for details.
|
||||
*/
|
||||
|
||||
#if !defined(CONFIG_APPTRACE_UART_TX_GPIO) || !defined(CONFIG_APPTRACE_UART_RX_GPIO)
|
||||
#include "soc/uart_pins.h"
|
||||
#endif
|
||||
|
||||
#ifndef CONFIG_APPTRACE_BUF_SIZE
|
||||
#define CONFIG_APPTRACE_BUF_SIZE 16384
|
||||
#endif
|
||||
|
||||
#ifndef CONFIG_APPTRACE_UART_TX_BUFF_SIZE
|
||||
#define CONFIG_APPTRACE_UART_TX_BUFF_SIZE 4096
|
||||
#endif
|
||||
|
||||
#ifndef CONFIG_APPTRACE_UART_TX_MSG_SIZE
|
||||
#define CONFIG_APPTRACE_UART_TX_MSG_SIZE 128
|
||||
#endif
|
||||
|
||||
#ifndef CONFIG_APPTRACE_UART_BAUDRATE
|
||||
#define CONFIG_APPTRACE_UART_BAUDRATE 1000000
|
||||
#endif
|
||||
|
||||
#ifndef CONFIG_APPTRACE_UART_TX_GPIO
|
||||
#define CONFIG_APPTRACE_UART_TX_GPIO U1TXD_GPIO_NUM
|
||||
#endif
|
||||
|
||||
#ifndef CONFIG_APPTRACE_UART_RX_GPIO
|
||||
#define CONFIG_APPTRACE_UART_RX_GPIO U1RXD_GPIO_NUM
|
||||
#endif
|
||||
|
||||
#ifndef CONFIG_APPTRACE_DEST_UART_NUM
|
||||
#define CONFIG_APPTRACE_DEST_UART_NUM 1
|
||||
#endif
|
||||
|
||||
#endif /* ESP_APP_TRACE_CONFIG_H_ */
|
||||
@@ -1,95 +0,0 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
#ifndef ESP_APP_TRACE_TYPES_H_
|
||||
#define ESP_APP_TRACE_TYPES_H_
|
||||
|
||||
#include <stdint.h>
|
||||
#include "spinlock.h"
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
/** Structure which holds data necessary for measuring time intervals.
|
||||
*
|
||||
* After initialization via esp_apptrace_tmo_init() user needs to call esp_apptrace_tmo_check()
|
||||
* periodically to check timeout for expiration.
|
||||
*/
|
||||
typedef struct {
|
||||
int64_t start; ///< time interval start (in us)
|
||||
int64_t tmo; ///< timeout value (in us)
|
||||
int64_t elapsed; ///< elapsed time (in us)
|
||||
} esp_apptrace_tmo_t;
|
||||
|
||||
/** Tracing module synchronization lock */
|
||||
typedef struct {
|
||||
spinlock_t mux;
|
||||
unsigned int int_state;
|
||||
} esp_apptrace_lock_t;
|
||||
|
||||
/** Ring buffer control structure.
|
||||
*
|
||||
* @note For purposes of application tracing module if there is no enough space for user data and write pointer can be wrapped
|
||||
* current ring buffer size can be temporarily shrunk in order to provide buffer with requested size.
|
||||
*/
|
||||
typedef struct {
|
||||
uint8_t *data; ///< pointer to data storage
|
||||
volatile uint32_t size; ///< size of data storage
|
||||
volatile uint32_t cur_size; ///< current size of data storage
|
||||
volatile uint32_t rd; ///< read pointer
|
||||
volatile uint32_t wr; ///< write pointer
|
||||
} esp_apptrace_rb_t;
|
||||
|
||||
/**
|
||||
* Application trace data destinations
|
||||
*/
|
||||
typedef enum {
|
||||
ESP_APPTRACE_DEST_JTAG,
|
||||
ESP_APPTRACE_DEST_UART,
|
||||
} esp_apptrace_dest_t;
|
||||
|
||||
/**
|
||||
* Application trace configuration for UART destination
|
||||
*/
|
||||
typedef struct {
|
||||
int uart_num; ///< Port number
|
||||
int tx_pin_num; ///< TX pin number
|
||||
int rx_pin_num; ///< RX pin number
|
||||
int baud_rate; ///< Baud rate
|
||||
uint32_t tx_buff_size; ///< TX ring buffer size
|
||||
uint32_t tx_msg_size; ///< Maximum size of the single message to transfer.
|
||||
} esp_apptrace_uart_config_t;
|
||||
|
||||
/**
|
||||
* Application trace trace header size in bytes. It is 2 bytes for SEGGER SystemView
|
||||
*/
|
||||
typedef enum {
|
||||
ESP_APPTRACE_HEADER_SIZE_16 = 2,
|
||||
ESP_APPTRACE_HEADER_SIZE_32 = 4,
|
||||
} esp_apptrace_header_size_t;
|
||||
|
||||
/**
|
||||
* Application trace configuration
|
||||
*/
|
||||
typedef struct {
|
||||
esp_apptrace_dest_t dest; ///< Destination type (JTAG or UART)
|
||||
|
||||
union {
|
||||
esp_apptrace_uart_config_t uart; ///< UART configuration (when dest is ESP_APPTRACE_DEST_UART)
|
||||
struct { ///< Reserved for JTAG (when dest is ESP_APPTRACE_DEST_JTAG)
|
||||
uint8_t _unused;
|
||||
} jtag;
|
||||
} dest_cfg; ///< Destination-specific configuration
|
||||
|
||||
uint32_t flush_tmo; ///< Flush timeout in milliseconds
|
||||
uint32_t flush_thresh; ///< Flush threshold in bytes
|
||||
} esp_apptrace_config_t;
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif /* ESP_APP_TRACE_TYPES_H_ */
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2025 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -10,23 +10,34 @@
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include "freertos/FreeRTOS.h"
|
||||
#include "esp_err.h"
|
||||
#include "esp_timer.h"
|
||||
#include "esp_app_trace_types.h"
|
||||
|
||||
/** Infinite waiting timeout */
|
||||
#define ESP_APPTRACE_TMO_INFINITE ((uint32_t)-1)
|
||||
|
||||
/** Structure which holds data necessary for measuring time intervals.
|
||||
*
|
||||
* After initialization via esp_apptrace_tmo_init() user needs to call esp_apptrace_tmo_check()
|
||||
* periodically to check timeout for expiration.
|
||||
*/
|
||||
typedef struct {
|
||||
int64_t start; ///< time interval start (in us)
|
||||
int64_t tmo; ///< timeout value (in us)
|
||||
int64_t elapsed; ///< elapsed time (in us)
|
||||
} esp_apptrace_tmo_t;
|
||||
|
||||
/**
|
||||
* @brief Initializes timeout structure.
|
||||
*
|
||||
* @param tmo Pointer to timeout structure to be initialized.
|
||||
* @param user_tmo Timeout value (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinitely.
|
||||
* @param user_tmo Timeout value (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinetly.
|
||||
*/
|
||||
static inline void esp_apptrace_tmo_init(esp_apptrace_tmo_t *tmo, uint32_t user_tmo)
|
||||
{
|
||||
tmo->start = esp_timer_get_time();
|
||||
tmo->tmo = user_tmo == ESP_APPTRACE_TMO_INFINITE ? (int64_t) -1 : (int64_t)user_tmo;
|
||||
tmo->tmo = user_tmo == ESP_APPTRACE_TMO_INFINITE ? (int64_t)-1 : (int64_t)user_tmo;
|
||||
tmo->elapsed = 0;
|
||||
}
|
||||
|
||||
@@ -41,15 +52,25 @@ esp_err_t esp_apptrace_tmo_check(esp_apptrace_tmo_t *tmo);
|
||||
|
||||
static inline uint32_t esp_apptrace_tmo_remaining_us(esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
return tmo->tmo != (int64_t) -1 ? (tmo->elapsed - tmo->tmo) : ESP_APPTRACE_TMO_INFINITE;
|
||||
return tmo->tmo != (int64_t)-1 ? (tmo->elapsed - tmo->tmo) : ESP_APPTRACE_TMO_INFINITE;
|
||||
}
|
||||
|
||||
/** Tracing module synchronization lock */
|
||||
typedef struct {
|
||||
spinlock_t mux;
|
||||
unsigned int_state;
|
||||
} esp_apptrace_lock_t;
|
||||
|
||||
/**
|
||||
* @brief Initializes lock structure.
|
||||
*
|
||||
* @param lock Pointer to lock structure to be initialized.
|
||||
*/
|
||||
void esp_apptrace_lock_init(esp_apptrace_lock_t *lock);
|
||||
static inline void esp_apptrace_lock_init(esp_apptrace_lock_t *lock)
|
||||
{
|
||||
portMUX_INITIALIZE(&lock->mux);
|
||||
lock->int_state = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Tries to acquire lock in specified time period.
|
||||
@@ -70,6 +91,19 @@ esp_err_t esp_apptrace_lock_take(esp_apptrace_lock_t *lock, esp_apptrace_tmo_t *
|
||||
*/
|
||||
esp_err_t esp_apptrace_lock_give(esp_apptrace_lock_t *lock);
|
||||
|
||||
/** Ring buffer control structure.
|
||||
*
|
||||
* @note For purposes of application tracing module if there is no enough space for user data and write pointer can be wrapped
|
||||
* current ring buffer size can be temporarily shrinked in order to provide buffer with requested size.
|
||||
*/
|
||||
typedef struct {
|
||||
uint8_t *data; ///< pointer to data storage
|
||||
volatile uint32_t size; ///< size of data storage
|
||||
volatile uint32_t cur_size; ///< current size of data storage
|
||||
volatile uint32_t rd; ///< read pointer
|
||||
volatile uint32_t wr; ///< write pointer
|
||||
} esp_apptrace_rb_t;
|
||||
|
||||
/**
|
||||
* @brief Initializes ring buffer control structure.
|
||||
*
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2025 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
#ifndef ESP_DBG_STUBS_H_
|
||||
#define ESP_DBG_STUBS_H_
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include "esp_err.h"
|
||||
|
||||
/**
|
||||
* Debug stubs entries IDs
|
||||
*/
|
||||
typedef enum {
|
||||
ESP_DBG_STUB_MAGIC_NUM,
|
||||
ESP_DBG_STUB_TABLE_SIZE,
|
||||
ESP_DBG_STUB_CONTROL_DATA, ///< stubs descriptor entry
|
||||
ESP_DBG_STUB_ENTRY_FIRST,
|
||||
ESP_DBG_STUB_ENTRY_GCOV ///< GCOV entry
|
||||
= ESP_DBG_STUB_ENTRY_FIRST,
|
||||
ESP_DBG_STUB_ENTRY_CAPABILITIES,
|
||||
ESP_DBG_STUB_ENTRY_MAX
|
||||
} esp_dbg_stub_id_t;
|
||||
|
||||
#define ESP_DBG_STUB_MAGIC_NUM_VAL 0xFEEDBEEF
|
||||
#define ESP_DBG_STUB_CAP_GCOV_TASK (1 << 0)
|
||||
|
||||
/**
|
||||
* @brief Initializes debug stubs.
|
||||
*
|
||||
* @note Must be called after esp_apptrace_init() if app tracing is enabled.
|
||||
*/
|
||||
void esp_dbg_stubs_init(void);
|
||||
|
||||
/**
|
||||
* @brief Initializes application tracing module.
|
||||
*
|
||||
* @note Should be called before any esp_apptrace_xxx call.
|
||||
*
|
||||
* @param id Stub ID.
|
||||
* @param entry Stub entry. Usually it is stub entry function address,
|
||||
* but can be any value meaningful for OpenOCD command/code
|
||||
* such as capabilities
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_dbg_stub_entry_set(esp_dbg_stub_id_t id, uint32_t entry);
|
||||
|
||||
/**
|
||||
* @brief Retrieves the corresponding stub entry
|
||||
*
|
||||
* @param id Stub ID.
|
||||
* @param entry Stub entry. Usually it is stub entry function address,
|
||||
* but can be any value meaningful for OpenOCD command/code
|
||||
* such as capabilities
|
||||
*
|
||||
* @return ESP_OK on success, otherwise see esp_err_t
|
||||
*/
|
||||
esp_err_t esp_dbg_stub_entry_get(esp_dbg_stub_id_t id, uint32_t *entry);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif // ESP_DBG_STUBS_H_
|
||||
80
components/app_trace/include/esp_sysview_trace.h
Normal file
80
components/app_trace/include/esp_sysview_trace.h
Normal file
@@ -0,0 +1,80 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2018-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
#ifndef ESP_SYSVIEW_TRACE_H_
|
||||
#define ESP_SYSVIEW_TRACE_H_
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include <stdarg.h>
|
||||
#include "esp_err.h"
|
||||
#include "SEGGER_RTT.h" // SEGGER_RTT_ESP_Flush
|
||||
#include "esp_app_trace_util.h" // ESP_APPTRACE_TMO_INFINITE
|
||||
|
||||
/**
|
||||
* @brief Flushes remaining data in SystemView trace buffer to host.
|
||||
*
|
||||
* @param tmo Timeout for operation (in us). Use ESP_APPTRACE_TMO_INFINITE to wait indefinetly.
|
||||
*
|
||||
* @return ESP_OK.
|
||||
*/
|
||||
static inline esp_err_t esp_sysview_flush(uint32_t tmo)
|
||||
{
|
||||
SEGGER_RTT_ESP_Flush(0, tmo);
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief vprintf-like function to sent log messages to the host.
|
||||
*
|
||||
* @param format Address of format string.
|
||||
* @param args List of arguments.
|
||||
*
|
||||
* @return Number of bytes written.
|
||||
*/
|
||||
int esp_sysview_vprintf(const char * format, va_list args);
|
||||
|
||||
/**
|
||||
* @brief Starts SystemView heap tracing.
|
||||
*
|
||||
* @param tmo Timeout (in us) to wait for the host to be connected. Use -1 to wait forever.
|
||||
*
|
||||
* @return ESP_OK on success, ESP_ERR_TIMEOUT if operation has been timed out.
|
||||
*/
|
||||
esp_err_t esp_sysview_heap_trace_start(uint32_t tmo);
|
||||
|
||||
/**
|
||||
* @brief Stops SystemView heap tracing.
|
||||
*
|
||||
* @return ESP_OK.
|
||||
*/
|
||||
esp_err_t esp_sysview_heap_trace_stop(void);
|
||||
|
||||
/**
|
||||
* @brief Sends heap allocation event to the host.
|
||||
*
|
||||
* @param addr Address of allocated block.
|
||||
* @param size Size of allocated block.
|
||||
* @param callers Pointer to array with callstack addresses.
|
||||
* Array size must be CONFIG_HEAP_TRACING_STACK_DEPTH.
|
||||
*/
|
||||
void esp_sysview_heap_trace_alloc(void *addr, uint32_t size, const void *callers);
|
||||
|
||||
/**
|
||||
* @brief Sends heap de-allocation event to the host.
|
||||
*
|
||||
* @param addr Address of de-allocated block.
|
||||
* @param callers Pointer to array with callstack addresses.
|
||||
* Array size must be CONFIG_HEAP_TRACING_STACK_DEPTH.
|
||||
*/
|
||||
void esp_sysview_heap_trace_free(void *addr, const void *callers);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif //ESP_SYSVIEW_TRACE_H_
|
||||
@@ -1,11 +1,23 @@
|
||||
[mapping:app_trace]
|
||||
archive: libapp_trace.a
|
||||
entries:
|
||||
if ESP_TRACE_TRANSPORT_APPTRACE = y:
|
||||
app_trace (noflash)
|
||||
app_trace_util (noflash)
|
||||
if APPTRACE_DEST_JTAG = y || APPTRACE_DEST_ALL = y:
|
||||
port_jtag (noflash)
|
||||
app_trace_membufs_proto (noflash)
|
||||
if APPTRACE_DEST_UART = y || APPTRACE_DEST_ALL = y:
|
||||
port_uart (noflash)
|
||||
app_trace (noflash)
|
||||
port_uart (noflash)
|
||||
app_trace_util (noflash)
|
||||
if APPTRACE_MEMBUFS_APPTRACE_PROTO_ENABLE:
|
||||
app_trace_membufs_proto (noflash)
|
||||
if APPTRACE_DEST_JTAG = y:
|
||||
port (noflash)
|
||||
if APPTRACE_SV_ENABLE = y:
|
||||
SEGGER_SYSVIEW (noflash)
|
||||
SEGGER_RTT_esp (noflash)
|
||||
SEGGER_SYSVIEW_Config_FreeRTOS (noflash)
|
||||
SEGGER_SYSVIEW_FreeRTOS (noflash)
|
||||
|
||||
[mapping:app_trace_driver]
|
||||
archive: libdriver.a
|
||||
entries:
|
||||
if APPTRACE_SV_TS_SOURCE_GPTIMER = y:
|
||||
gptimer (noflash)
|
||||
else:
|
||||
* (default)
|
||||
|
||||
@@ -15,7 +15,7 @@ extern "C" {
|
||||
|
||||
/** Apptrace HW interface. */
|
||||
typedef struct {
|
||||
esp_err_t (*init)(void *hw_data, const esp_apptrace_config_t *config);
|
||||
esp_err_t (*init)(void *hw_data);
|
||||
uint8_t *(*get_up_buffer)(void *hw_data, uint32_t, esp_apptrace_tmo_t *);
|
||||
esp_err_t (*put_up_buffer)(void *hw_data, uint8_t *, esp_apptrace_tmo_t *);
|
||||
esp_err_t (*flush_up_buffer_nolock)(void *hw_data, uint32_t, esp_apptrace_tmo_t *);
|
||||
@@ -24,7 +24,6 @@ typedef struct {
|
||||
uint8_t *(*get_down_buffer)(void *hw_data, uint32_t *, esp_apptrace_tmo_t *);
|
||||
esp_err_t (*put_down_buffer)(void *hw_data, uint8_t *, esp_apptrace_tmo_t *);
|
||||
bool (*host_is_connected)(void *hw_data);
|
||||
void (*set_header_size)(void *hw_data, esp_apptrace_header_size_t header_size);
|
||||
} esp_apptrace_hw_t;
|
||||
|
||||
esp_apptrace_hw_t *esp_apptrace_jtag_hw_get(void **data);
|
||||
|
||||
@@ -1,126 +1,93 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2025 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
#include <stdint.h>
|
||||
#include <stdbool.h>
|
||||
#include <string.h>
|
||||
#include <inttypes.h>
|
||||
#include "esp_err.h"
|
||||
|
||||
#include "soc/soc.h"
|
||||
#include "esp_log.h"
|
||||
#include "esp_cpu.h"
|
||||
#include "esp_attr.h"
|
||||
#include "esp_private/uart_share_hw_ctrl.h"
|
||||
#include "hal/uart_hal.h"
|
||||
#include "hal/gpio_hal.h"
|
||||
#include "driver/uart.h"
|
||||
#include "soc/uart_periph.h"
|
||||
#include "esp_clk_tree.h"
|
||||
#include "esp_private/esp_clk_tree_common.h"
|
||||
#include "soc/gpio_periph.h"
|
||||
#include "esp_rom_gpio.h"
|
||||
#include "hal/uart_ll.h"
|
||||
#include "esp_intr_alloc.h"
|
||||
#include "esp_heap_caps.h"
|
||||
#include "esp_private/esp_gpio_reserve.h"
|
||||
|
||||
#include "esp_app_trace_port.h"
|
||||
#include "esp_app_trace_util.h"
|
||||
#include "esp_app_trace_types.h"
|
||||
|
||||
static const char *TAG = "esp_apptrace_uart";
|
||||
#include "driver/uart.h"
|
||||
#include "hal/uart_ll.h"
|
||||
#include "string.h"
|
||||
#include "driver/gpio.h"
|
||||
|
||||
#ifndef MIN
|
||||
#define MIN(a, b) ((a) < (b) ? (a) : (b))
|
||||
#endif
|
||||
|
||||
#define APPTRACE_DEST_UART (CONFIG_APPTRACE_DEST_UART0 | CONFIG_APPTRACE_DEST_UART1 | CONFIG_APPTRACE_DEST_UART2)
|
||||
|
||||
#define APP_TRACE_MAX_TX_BUFF_UART CONFIG_APPTRACE_UART_TX_BUFF_SIZE
|
||||
#define APP_TRACE_MAX_TX_MSG_UART CONFIG_APPTRACE_UART_TX_MSG_SIZE
|
||||
|
||||
/** UART HW transport data */
|
||||
typedef struct {
|
||||
uint8_t *buffer; ///< Ring buffer data
|
||||
uint32_t max_size; ///< Ring buffer maximum size (must be power of 2)
|
||||
volatile uint32_t count; ///< Number of bytes currently in the buffer
|
||||
volatile uint32_t head; ///< Write pointer index
|
||||
volatile uint32_t tail; ///< Read pointer index
|
||||
} esp_apptrace_uart_rb_t;
|
||||
|
||||
typedef struct {
|
||||
int inited;
|
||||
volatile bool tx_busy; ///< TX busy flag
|
||||
uart_hal_context_t hal_ctx; ///< UART HAL context
|
||||
esp_apptrace_uart_rb_t tx_ring; ///< TX ring buffer
|
||||
intr_handle_t intr_handle; ///< Interrupt handle
|
||||
|
||||
/* TX message buffer */
|
||||
uint8_t *tx_msg_buff; ///< TX message buffer to provide with get_up_buffer
|
||||
uint32_t tx_msg_buff_size; ///< TX message buffer size & maximum size of the single message to transfer.
|
||||
uint32_t tx_pending_msg_size; ///< Pending message size to send with put_up_buffer
|
||||
|
||||
/* RX message buffer */
|
||||
uint8_t *rx_msg_buff; ///< RX message buffer provided with down_buffer_config function
|
||||
uint32_t rx_msg_buff_size; ///< RX message buffer size provided with down_buffer_config function
|
||||
|
||||
uint8_t inited;
|
||||
#if CONFIG_APPTRACE_LOCK_ENABLE
|
||||
esp_apptrace_lock_t lock; ///< Sync lock
|
||||
esp_apptrace_lock_t lock; // sync lock
|
||||
#endif
|
||||
uart_port_t port_num;
|
||||
// TX data ring buffer
|
||||
uint8_t *tx_data_buff;
|
||||
int32_t tx_data_buff_in;
|
||||
int32_t tx_data_buff_out;
|
||||
// TX message buffer
|
||||
uint8_t *tx_msg_buff;
|
||||
uint32_t tx_msg_buff_size;
|
||||
|
||||
// RX message buffer
|
||||
uint8_t *down_buffer;
|
||||
uint32_t down_buffer_size;
|
||||
// Buffer overflow flags
|
||||
bool message_buff_overflow;
|
||||
bool circular_buff_overflow;
|
||||
} esp_apptrace_uart_data_t;
|
||||
|
||||
static inline bool is_power_of_two(uint32_t n)
|
||||
#if APPTRACE_DEST_UART
|
||||
static esp_err_t esp_apptrace_uart_init(esp_apptrace_uart_data_t *hw_data);
|
||||
static esp_err_t esp_apptrace_uart_flush(esp_apptrace_uart_data_t *hw_data, esp_apptrace_tmo_t *tmo);
|
||||
static esp_err_t esp_apptrace_uart_flush_nolock(esp_apptrace_uart_data_t *hw_data, uint32_t min_sz, esp_apptrace_tmo_t *tmo);
|
||||
static uint8_t *esp_apptrace_uart_up_buffer_get(esp_apptrace_uart_data_t *hw_data, uint32_t size, esp_apptrace_tmo_t *tmo);
|
||||
static esp_err_t esp_apptrace_uart_up_buffer_put(esp_apptrace_uart_data_t *hw_data, uint8_t *ptr, esp_apptrace_tmo_t *tmo);
|
||||
static void esp_apptrace_uart_down_buffer_config(esp_apptrace_uart_data_t *hw_data, uint8_t *buf, uint32_t size);
|
||||
static uint8_t *esp_apptrace_uart_down_buffer_get(esp_apptrace_uart_data_t *hw_data, uint32_t *size, esp_apptrace_tmo_t *tmo);
|
||||
static esp_err_t esp_apptrace_uart_down_buffer_put(esp_apptrace_uart_data_t *hw_data, uint8_t *ptr, esp_apptrace_tmo_t *tmo);
|
||||
static bool esp_apptrace_uart_host_is_connected(esp_apptrace_uart_data_t *hw_data);
|
||||
|
||||
#endif // APPTRACE_DEST_UART
|
||||
const static char *TAG = "esp_apptrace_uart";
|
||||
|
||||
esp_apptrace_hw_t *esp_apptrace_uart_hw_get(int num, void **data)
|
||||
{
|
||||
return n != 0 && (n & (n - 1)) == 0;
|
||||
ESP_LOGD(TAG,"esp_apptrace_uart_hw_get - %i", num);
|
||||
#if APPTRACE_DEST_UART
|
||||
static esp_apptrace_uart_data_t s_uart_hw_data = {
|
||||
};
|
||||
static esp_apptrace_hw_t s_uart_hw = {
|
||||
.init = (esp_err_t (*)(void *))esp_apptrace_uart_init,
|
||||
.get_up_buffer = (uint8_t *(*)(void *, uint32_t, esp_apptrace_tmo_t *))esp_apptrace_uart_up_buffer_get,
|
||||
.put_up_buffer = (esp_err_t (*)(void *, uint8_t *, esp_apptrace_tmo_t *))esp_apptrace_uart_up_buffer_put,
|
||||
.flush_up_buffer_nolock = (esp_err_t (*)(void *, uint32_t, esp_apptrace_tmo_t *))esp_apptrace_uart_flush_nolock,
|
||||
.flush_up_buffer = (esp_err_t (*)(void *, esp_apptrace_tmo_t *))esp_apptrace_uart_flush,
|
||||
.down_buffer_config = (void (*)(void *, uint8_t *, uint32_t ))esp_apptrace_uart_down_buffer_config,
|
||||
.get_down_buffer = (uint8_t *(*)(void *, uint32_t *, esp_apptrace_tmo_t *))esp_apptrace_uart_down_buffer_get,
|
||||
.put_down_buffer = (esp_err_t (*)(void *, uint8_t *, esp_apptrace_tmo_t *))esp_apptrace_uart_down_buffer_put,
|
||||
.host_is_connected = (bool (*)(void *))esp_apptrace_uart_host_is_connected,
|
||||
};
|
||||
s_uart_hw_data.port_num = num;
|
||||
*data = &s_uart_hw_data;
|
||||
return &s_uart_hw;
|
||||
#else
|
||||
return NULL;
|
||||
#endif
|
||||
}
|
||||
|
||||
static inline uint32_t ring_buffer_mask(const esp_apptrace_uart_rb_t *rb)
|
||||
{
|
||||
return rb->max_size - 1;
|
||||
}
|
||||
#if APPTRACE_DEST_UART
|
||||
|
||||
/* Get the length of the data in the ring buffer */
|
||||
static inline uint32_t ring_buffer_data_len(const esp_apptrace_uart_rb_t *rb)
|
||||
{
|
||||
return rb->count;
|
||||
}
|
||||
|
||||
/* Get the length of the free space in the ring buffer */
|
||||
static inline uint32_t ring_buffer_free_len(const esp_apptrace_uart_rb_t *rb)
|
||||
{
|
||||
return rb->max_size - rb->count;
|
||||
}
|
||||
|
||||
static inline void ring_buffer_advance_tail(esp_apptrace_uart_rb_t *rb, uint32_t count)
|
||||
{
|
||||
rb->tail = (rb->tail + count) & ring_buffer_mask(rb);
|
||||
rb->count -= count;
|
||||
}
|
||||
|
||||
static inline void ring_buffer_advance_head(esp_apptrace_uart_rb_t *rb, uint32_t count)
|
||||
{
|
||||
rb->head = (rb->head + count) & ring_buffer_mask(rb);
|
||||
rb->count += count;
|
||||
}
|
||||
|
||||
static inline uint32_t ring_buffer_calc_to_send(const esp_apptrace_uart_rb_t *rb, uint32_t tx_msg_size)
|
||||
{
|
||||
uint32_t used = ring_buffer_data_len(rb);
|
||||
if (used == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
uint32_t cont = rb->max_size - rb->tail;
|
||||
uint32_t n = MIN(used, cont);
|
||||
|
||||
/* Apply message size limit if specified */
|
||||
if (tx_msg_size && tx_msg_size < n) {
|
||||
return tx_msg_size;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_lock(void *hw_data, esp_apptrace_tmo_t *tmo)
|
||||
static esp_err_t esp_apptrace_uart_lock(esp_apptrace_uart_data_t *hw_data, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
#if CONFIG_APPTRACE_LOCK_ENABLE
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
esp_err_t ret = esp_apptrace_lock_take(&uart_data->lock, tmo);
|
||||
esp_err_t ret = esp_apptrace_lock_take(&hw_data->lock, tmo);
|
||||
if (ret != ESP_OK) {
|
||||
return ESP_FAIL;
|
||||
}
|
||||
@@ -128,395 +95,254 @@ static esp_err_t esp_apptrace_uart_lock(void *hw_data, esp_apptrace_tmo_t *tmo)
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_unlock(void *hw_data)
|
||||
static esp_err_t esp_apptrace_uart_unlock(esp_apptrace_uart_data_t *hw_data)
|
||||
{
|
||||
esp_err_t ret = ESP_OK;
|
||||
#if CONFIG_APPTRACE_LOCK_ENABLE
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
ret = esp_apptrace_lock_give(&uart_data->lock);
|
||||
assert(ret == ESP_OK && "Failed to unlock apptrace uart lock!");
|
||||
ret = esp_apptrace_lock_give(&hw_data->lock);
|
||||
#endif
|
||||
return ret;
|
||||
}
|
||||
|
||||
static esp_err_t ring_buffer_put(esp_apptrace_uart_rb_t *rb, const uint8_t *data, uint32_t len)
|
||||
static inline void esp_apptrace_uart_hw_init(void)
|
||||
{
|
||||
/* Drop oldest. Make available space if needed */
|
||||
uint32_t free_len = ring_buffer_free_len(rb);
|
||||
if (len > free_len) {
|
||||
uint32_t need = len - free_len;
|
||||
ring_buffer_advance_tail(rb, need);
|
||||
ESP_APPTRACE_LOGI("Initialized UART on CPU%d", esp_cpu_get_core_id());
|
||||
}
|
||||
|
||||
|
||||
/*****************************************************************************************/
|
||||
/***************************** Apptrace HW iface *****************************************/
|
||||
/*****************************************************************************************/
|
||||
|
||||
static esp_err_t esp_apptrace_send_uart_data(esp_apptrace_uart_data_t *hw_data, const char *data, uint32_t size, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
esp_err_t res = esp_apptrace_uart_lock(hw_data, tmo);
|
||||
if (res != ESP_OK) {
|
||||
return res;
|
||||
}
|
||||
// We store current out position to handle it without lock
|
||||
volatile int32_t out_position = hw_data->tx_data_buff_out;
|
||||
|
||||
uint32_t head = rb->head;
|
||||
uint32_t space_to_end = rb->max_size - head;
|
||||
|
||||
if (len <= space_to_end) {
|
||||
memcpy(&rb->buffer[head], data, len);
|
||||
int len_free = APP_TRACE_MAX_TX_BUFF_UART - (hw_data->tx_data_buff_in - out_position);
|
||||
if (out_position > hw_data->tx_data_buff_in) {
|
||||
len_free = out_position - hw_data->tx_data_buff_in;
|
||||
}
|
||||
int check_len = APP_TRACE_MAX_TX_BUFF_UART - hw_data->tx_data_buff_in;
|
||||
if (size <= len_free)
|
||||
{
|
||||
if ( check_len >= size) {
|
||||
memcpy(&hw_data->tx_data_buff[hw_data->tx_data_buff_in], data, size);
|
||||
hw_data->tx_data_buff_in += size;
|
||||
} else {
|
||||
memcpy(&hw_data->tx_data_buff[hw_data->tx_data_buff_in], data, APP_TRACE_MAX_TX_BUFF_UART - hw_data->tx_data_buff_in);
|
||||
memcpy(&hw_data->tx_data_buff[0], &data[APP_TRACE_MAX_TX_BUFF_UART - hw_data->tx_data_buff_in], size - (APP_TRACE_MAX_TX_BUFF_UART - hw_data->tx_data_buff_in));
|
||||
hw_data->tx_data_buff_in = size - (APP_TRACE_MAX_TX_BUFF_UART - hw_data->tx_data_buff_in);
|
||||
}
|
||||
if (hw_data->tx_data_buff_in >= APP_TRACE_MAX_TX_BUFF_UART) {
|
||||
hw_data->tx_data_buff_in = 0;
|
||||
}
|
||||
} else {
|
||||
memcpy(&rb->buffer[head], data, space_to_end);
|
||||
memcpy(&rb->buffer[0], &data[space_to_end], len - space_to_end);
|
||||
hw_data->circular_buff_overflow = true;
|
||||
}
|
||||
|
||||
ring_buffer_advance_head(rb, len);
|
||||
if (esp_apptrace_uart_unlock(hw_data) != ESP_OK) {
|
||||
assert(false && "Failed to unlock apptrace data!");
|
||||
}
|
||||
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
static esp_err_t ring_buffer_init(esp_apptrace_uart_rb_t *rb, uint32_t size)
|
||||
static void send_buff_data(esp_apptrace_uart_data_t *hw_data, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
rb->buffer = heap_caps_malloc(size, MALLOC_CAP_INTERNAL | MALLOC_CAP_8BIT);
|
||||
if (!rb->buffer) {
|
||||
return ESP_ERR_NO_MEM;
|
||||
if (hw_data->tx_data_buff_in == hw_data->tx_data_buff_out) {
|
||||
return;
|
||||
}
|
||||
|
||||
rb->max_size = size;
|
||||
rb->count = 0;
|
||||
rb->head = 0;
|
||||
rb->tail = 0;
|
||||
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
static void IRAM_ATTR esp_apptrace_uart_isr_handler(void *arg)
|
||||
{
|
||||
esp_apptrace_uart_data_t *uart_data = arg;
|
||||
esp_apptrace_uart_rb_t *rb = &uart_data->tx_ring;
|
||||
|
||||
uint32_t intr_status = uart_hal_get_intsts_mask(&uart_data->hal_ctx);
|
||||
|
||||
if (intr_status & UART_INTR_TXFIFO_EMPTY) {
|
||||
uart_hal_clr_intsts_mask(&uart_data->hal_ctx, UART_INTR_TXFIFO_EMPTY);
|
||||
|
||||
uint32_t to_send = ring_buffer_calc_to_send(rb, uart_data->tx_msg_buff_size);
|
||||
if (to_send > 0) {
|
||||
uint32_t written = 0;
|
||||
uart_hal_write_txfifo(&uart_data->hal_ctx, &rb->buffer[rb->tail], to_send, &written);
|
||||
ring_buffer_advance_tail(rb, written);
|
||||
}
|
||||
|
||||
/* If ring buffer is empty, disable TX interrupt */
|
||||
if (ring_buffer_data_len(rb) == 0) {
|
||||
uart_ll_disable_intr_mask(uart_data->hal_ctx.dev, UART_INTR_TXFIFO_EMPTY);
|
||||
uart_data->tx_busy = false;
|
||||
// We store current in position to handle it without lock
|
||||
volatile int32_t in_position = hw_data->tx_data_buff_in;
|
||||
if (in_position > hw_data->tx_data_buff_out) {
|
||||
int bytes_sent = uart_write_bytes(hw_data->port_num, &hw_data->tx_data_buff[hw_data->tx_data_buff_out], in_position - hw_data->tx_data_buff_out);
|
||||
hw_data->tx_data_buff_out += bytes_sent;
|
||||
} else {
|
||||
int bytes_sent = uart_write_bytes(hw_data->port_num, &hw_data->tx_data_buff[hw_data->tx_data_buff_out], APP_TRACE_MAX_TX_BUFF_UART - hw_data->tx_data_buff_out);
|
||||
hw_data->tx_data_buff_out += bytes_sent;
|
||||
if (hw_data->tx_data_buff_out >= APP_TRACE_MAX_TX_BUFF_UART) {
|
||||
hw_data->tx_data_buff_out = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_init(void *hw_data, const esp_apptrace_config_t *config)
|
||||
{
|
||||
esp_err_t ret = ESP_ERR_INVALID_ARG;
|
||||
uint64_t gpio_mask = 0;
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
const esp_apptrace_uart_config_t *uart_config = &config->dest_cfg.uart;
|
||||
#define APP_TRACE_UART_STOP_WAIT_TMO 1000000 //us
|
||||
|
||||
/* Init function is called on every core, so ensure to do main setup only once */
|
||||
static void esp_apptrace_send_uart_tx_task(void *arg)
|
||||
{
|
||||
esp_apptrace_uart_data_t *hw_data = (esp_apptrace_uart_data_t *)arg;
|
||||
esp_apptrace_tmo_t tmo;
|
||||
esp_apptrace_tmo_init(&tmo, APP_TRACE_UART_STOP_WAIT_TMO);
|
||||
|
||||
vTaskDelay(10);
|
||||
while (1) {
|
||||
send_buff_data(hw_data, &tmo);
|
||||
vTaskDelay(10);
|
||||
if (hw_data->circular_buff_overflow == true)
|
||||
{
|
||||
hw_data->circular_buff_overflow = false;
|
||||
ESP_LOGE(TAG, "Buffer overflow. Please increase UART baudrate, or increase UART TX ring buffer size in menuconfig.");
|
||||
}
|
||||
if (hw_data->message_buff_overflow == true)
|
||||
{
|
||||
hw_data->message_buff_overflow = false;
|
||||
ESP_LOGE(TAG, "Message size more then message buffer!");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static const int APP_TRACE_UART_RX_BUF_SIZE = 4024;
|
||||
|
||||
static esp_err_t esp_apptrace_uart_init(esp_apptrace_uart_data_t *hw_data)
|
||||
{
|
||||
int core_id = esp_cpu_get_core_id();
|
||||
if (core_id == 0) {
|
||||
|
||||
if (uart_config->uart_num == CONFIG_ESP_CONSOLE_UART_NUM) {
|
||||
ESP_APPTRACE_LOGE("Application trace UART and console UART cannot use the same port number");
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
hw_data->tx_data_buff = (uint8_t *)heap_caps_malloc(APP_TRACE_MAX_TX_BUFF_UART, MALLOC_CAP_INTERNAL|MALLOC_CAP_8BIT);
|
||||
if (hw_data->tx_data_buff == NULL){
|
||||
return ESP_ERR_NO_MEM;
|
||||
}
|
||||
|
||||
if (uart_config->uart_num >= SOC_UART_HP_NUM) {
|
||||
ESP_APPTRACE_LOGE("UART port number %d is not supported!", uart_config->uart_num);
|
||||
return ESP_ERR_NOT_SUPPORTED;
|
||||
hw_data->tx_data_buff_in = 0;
|
||||
hw_data->tx_data_buff_out = 0;
|
||||
hw_data->tx_msg_buff = (uint8_t *)heap_caps_malloc(APP_TRACE_MAX_TX_MSG_UART, MALLOC_CAP_INTERNAL|MALLOC_CAP_8BIT);
|
||||
if (hw_data->tx_msg_buff == NULL)
|
||||
{
|
||||
return ESP_ERR_NO_MEM;
|
||||
}
|
||||
hw_data->tx_msg_buff_size = 0;
|
||||
hw_data->down_buffer_size = 0;
|
||||
hw_data->message_buff_overflow = false;
|
||||
hw_data->circular_buff_overflow = false;
|
||||
|
||||
if (GPIO_IS_VALID_GPIO(uart_config->tx_pin_num)) {
|
||||
gpio_mask |= BIT64(uart_config->tx_pin_num);
|
||||
}
|
||||
if (GPIO_IS_VALID_GPIO(uart_config->rx_pin_num)) {
|
||||
gpio_mask |= BIT64(uart_config->rx_pin_num);
|
||||
}
|
||||
|
||||
if (gpio_mask == 0) {
|
||||
ESP_LOGE(TAG, "No valid GPIOs to reserve");
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
const uart_config_t uart_config = {
|
||||
.baud_rate = CONFIG_APPTRACE_UART_BAUDRATE,
|
||||
.data_bits = UART_DATA_8_BITS,
|
||||
.parity = UART_PARITY_DISABLE,
|
||||
.stop_bits = UART_STOP_BITS_1,
|
||||
.flow_ctrl = UART_HW_FLOWCTRL_DISABLE,
|
||||
.source_clk = UART_SCLK_DEFAULT,
|
||||
};
|
||||
ESP_LOGI(TAG, "UART baud rate: %i", CONFIG_APPTRACE_UART_BAUDRATE);
|
||||
// We won't use a buffer for sending data.
|
||||
esp_err_t err = uart_driver_install(hw_data->port_num, APP_TRACE_UART_RX_BUF_SIZE, APP_TRACE_UART_RX_BUF_SIZE, 0, NULL, 0);
|
||||
assert((err == ESP_OK) && "Not possible to install UART. Please check and change menuconfig parameters!");
|
||||
err = uart_param_config(hw_data->port_num, &uart_config);
|
||||
assert((err == ESP_OK) && "Not possible to configure UART. Please check and change menuconfig parameters!");
|
||||
err = uart_set_pin(hw_data->port_num, CONFIG_APPTRACE_UART_TX_GPIO, CONFIG_APPTRACE_UART_RX_GPIO, UART_PIN_NO_CHANGE, UART_PIN_NO_CHANGE);
|
||||
assert((err == ESP_OK) && "Not possible to configure UART RX/TX pins. Please check and change menuconfig parameters!");
|
||||
|
||||
uint64_t r = esp_gpio_reserve(gpio_mask);
|
||||
if (r & gpio_mask) {
|
||||
ESP_LOGE(TAG, "GPIO(s) are already reserved: 0x%"PRIx64, r & gpio_mask);
|
||||
return ESP_ERR_INVALID_STATE;
|
||||
}
|
||||
|
||||
uart_data->hal_ctx.dev = UART_LL_GET_HW(uart_config->uart_num);
|
||||
|
||||
HP_UART_BUS_CLK_ATOMIC() {
|
||||
uart_ll_enable_bus_clock(uart_config->uart_num, true);
|
||||
uart_ll_reset_register(uart_config->uart_num);
|
||||
}
|
||||
HP_UART_SRC_CLK_ATOMIC() {
|
||||
uart_ll_sclk_enable(uart_data->hal_ctx.dev);
|
||||
}
|
||||
|
||||
uint32_t sclk_hz;
|
||||
esp_clk_tree_src_get_freq_hz(UART_SCLK_DEFAULT, ESP_CLK_TREE_SRC_FREQ_PRECISION_CACHED, &sclk_hz);
|
||||
/* Enable the default clock source */
|
||||
esp_clk_tree_enable_src(UART_SCLK_DEFAULT, true);
|
||||
|
||||
/* Initialize UART HAL (sets default 8N1 mode) */
|
||||
uart_hal_init(&uart_data->hal_ctx, uart_config->uart_num);
|
||||
|
||||
ESP_LOGI(TAG, "uart_hal_init: %d", uart_config->uart_num);
|
||||
|
||||
HP_UART_SRC_CLK_ATOMIC() {
|
||||
uart_hal_set_sclk(&uart_data->hal_ctx, UART_SCLK_DEFAULT);
|
||||
uart_hal_set_baudrate(&uart_data->hal_ctx, uart_config->baud_rate, sclk_hz);
|
||||
}
|
||||
|
||||
/* Configure FIFO thresholds */
|
||||
uart_hal_set_txfifo_empty_thr(&uart_data->hal_ctx, 16); /* Slow down IRQ rate */
|
||||
uart_hal_set_rxfifo_full_thr(&uart_data->hal_ctx, 1);
|
||||
|
||||
/* Initialize TX ring buffer */
|
||||
if (uart_config->tx_buff_size == 0 || !is_power_of_two(uart_config->tx_buff_size)) {
|
||||
ESP_APPTRACE_LOGE("TX ring buffer size (%u) must be a power of two and greater than 0",
|
||||
uart_config->tx_buff_size);
|
||||
goto err_init_ring_buff;
|
||||
}
|
||||
ret = ring_buffer_init(&uart_data->tx_ring, uart_config->tx_buff_size);
|
||||
if (ret != ESP_OK) {
|
||||
ESP_APPTRACE_LOGE("Failed to initialize TX ring buffer");
|
||||
goto err_init_ring_buff;
|
||||
}
|
||||
|
||||
/* Initialize TX message buffer for providing with get_up_buffer */
|
||||
uart_data->tx_msg_buff_size = uart_config->tx_msg_size;
|
||||
uart_data->tx_msg_buff = heap_caps_malloc(uart_data->tx_msg_buff_size, MALLOC_CAP_INTERNAL | MALLOC_CAP_8BIT);
|
||||
if (uart_data->tx_msg_buff == NULL) {
|
||||
ESP_APPTRACE_LOGE("Failed to initialize TX message buffer");
|
||||
ret = ESP_ERR_NO_MEM;
|
||||
goto err_alloc_msg_buff;
|
||||
}
|
||||
|
||||
/* Disable all interrupts and clear status */
|
||||
uart_ll_disable_intr_mask(uart_data->hal_ctx.dev, UART_LL_INTR_MASK);
|
||||
uart_ll_clr_intsts_mask(uart_data->hal_ctx.dev, UART_LL_INTR_MASK);
|
||||
|
||||
/* Install interrupt handler */
|
||||
int intr_alloc_flags = 0;
|
||||
ret = esp_intr_alloc(uart_periph_signal[uart_config->uart_num].irq, intr_alloc_flags,
|
||||
esp_apptrace_uart_isr_handler, uart_data, &uart_data->intr_handle);
|
||||
if (ret != ESP_OK) {
|
||||
ESP_APPTRACE_LOGE("Failed to allocate interrupt: %s", esp_err_to_name(ret));
|
||||
goto err_alloc_intr;
|
||||
}
|
||||
|
||||
/* Reset FIFOs */
|
||||
uart_hal_rxfifo_rst(&uart_data->hal_ctx);
|
||||
uart_hal_txfifo_rst(&uart_data->hal_ctx);
|
||||
|
||||
/* Configure GPIO pins for RX and TX */
|
||||
const uint32_t tx_idx = UART_PERIPH_SIGNAL(uart_config->uart_num, SOC_UART_PERIPH_SIGNAL_TX);
|
||||
const uint32_t rx_idx = UART_PERIPH_SIGNAL(uart_config->uart_num, SOC_UART_PERIPH_SIGNAL_RX);
|
||||
|
||||
/* Configure TX pin */
|
||||
gpio_ll_func_sel(&GPIO, uart_config->tx_pin_num, PIN_FUNC_GPIO);
|
||||
esp_rom_gpio_pad_pullup_only(uart_config->tx_pin_num);
|
||||
esp_rom_gpio_connect_out_signal(uart_config->tx_pin_num, tx_idx, 0, 0);
|
||||
|
||||
/* Configure RX pin */
|
||||
gpio_ll_input_enable(&GPIO, uart_config->rx_pin_num);
|
||||
esp_rom_gpio_pad_pullup_only(uart_config->rx_pin_num);
|
||||
esp_rom_gpio_connect_in_signal(uart_config->rx_pin_num, rx_idx, 0);
|
||||
int uart_prio = CONFIG_APPTRACE_UART_TASK_PRIO;
|
||||
if (uart_prio >= (configMAX_PRIORITIES-1)) uart_prio = configMAX_PRIORITIES - 1;
|
||||
err = xTaskCreate(esp_apptrace_send_uart_tx_task, "app_trace_uart_tx_task", 2500, hw_data, uart_prio, NULL);
|
||||
assert((err == pdPASS) && "Not possible to configure UART. Not possible to create task!");
|
||||
|
||||
#if CONFIG_APPTRACE_LOCK_ENABLE
|
||||
esp_apptrace_lock_init(&uart_data->lock);
|
||||
esp_apptrace_lock_init(&hw_data->lock);
|
||||
#endif
|
||||
|
||||
}
|
||||
|
||||
uart_data->inited |= 1 << core_id;
|
||||
uart_data->tx_busy = false;
|
||||
// init UART on this CPU
|
||||
esp_apptrace_uart_hw_init();
|
||||
hw_data->inited |= 1 << core_id;
|
||||
|
||||
return ESP_OK;
|
||||
|
||||
err_alloc_intr:
|
||||
heap_caps_free(uart_data->tx_msg_buff);
|
||||
err_alloc_msg_buff:
|
||||
heap_caps_free(uart_data->tx_ring.buffer);
|
||||
err_init_ring_buff:
|
||||
esp_clk_tree_enable_src(UART_SCLK_DEFAULT, false);
|
||||
HP_UART_SRC_CLK_ATOMIC() {
|
||||
uart_ll_sclk_disable(uart_data->hal_ctx.dev);
|
||||
}
|
||||
HP_UART_BUS_CLK_ATOMIC() {
|
||||
uart_ll_enable_bus_clock(uart_config->uart_num, false);
|
||||
}
|
||||
esp_gpio_revoke(gpio_mask);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static uint8_t *esp_apptrace_uart_up_buffer_get(void *hw_data, uint32_t size, esp_apptrace_tmo_t *tmo)
|
||||
static uint8_t *esp_apptrace_uart_up_buffer_get(esp_apptrace_uart_data_t *hw_data, uint32_t size, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
|
||||
if (size == 0 || size > uart_data->tx_msg_buff_size) {
|
||||
uint8_t *ptr;
|
||||
if (size > APP_TRACE_MAX_TX_MSG_UART) {
|
||||
hw_data->message_buff_overflow = true;
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (esp_apptrace_uart_lock(uart_data, tmo) != ESP_OK) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (uart_data->tx_pending_msg_size != 0) {
|
||||
if (hw_data->tx_msg_buff_size != 0)
|
||||
{
|
||||
// A previous message was not sent.
|
||||
esp_apptrace_uart_unlock(uart_data);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
uart_data->tx_pending_msg_size = size;
|
||||
|
||||
esp_apptrace_uart_unlock(uart_data);
|
||||
|
||||
return uart_data->tx_msg_buff;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_up_buffer_put(void *hw_data, uint8_t *ptr, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
esp_apptrace_uart_rb_t *rb = &uart_data->tx_ring;
|
||||
|
||||
esp_err_t res = esp_apptrace_uart_lock(uart_data, tmo);
|
||||
esp_err_t res = esp_apptrace_uart_lock(hw_data, tmo);
|
||||
if (res != ESP_OK) {
|
||||
return res;
|
||||
return NULL;
|
||||
}
|
||||
ptr = hw_data->tx_msg_buff;
|
||||
hw_data->tx_msg_buff_size = size;
|
||||
|
||||
/* Add data to ring buffer */
|
||||
ring_buffer_put(rb, ptr, uart_data->tx_pending_msg_size);
|
||||
uart_data->tx_pending_msg_size = 0;
|
||||
|
||||
esp_apptrace_uart_unlock(uart_data);
|
||||
|
||||
// Trigger transmission if not already in progress
|
||||
if (!uart_data->tx_busy) {
|
||||
uart_data->tx_busy = true;
|
||||
/* Enable TX interrupt */
|
||||
uart_ll_clr_intsts_mask(uart_data->hal_ctx.dev, UART_INTR_TXFIFO_EMPTY);
|
||||
uart_ll_ena_intr_mask(uart_data->hal_ctx.dev, UART_INTR_TXFIFO_EMPTY);
|
||||
// now we can safely unlock apptrace to allow other tasks/ISRs to get other buffers and write their data
|
||||
if (esp_apptrace_uart_unlock(hw_data) != ESP_OK) {
|
||||
assert(false && "Failed to unlock apptrace data!");
|
||||
}
|
||||
|
||||
return ESP_OK;
|
||||
return ptr;
|
||||
}
|
||||
|
||||
static void esp_apptrace_uart_down_buffer_config(void *hw_data, uint8_t *buf, uint32_t size)
|
||||
static esp_err_t esp_apptrace_uart_up_buffer_put(esp_apptrace_uart_data_t *hw_data, uint8_t *ptr, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
|
||||
assert(buf != NULL && "Down buffer cannot be NULL");
|
||||
assert(size > 0 && "Down buffer size must be greater than 0");
|
||||
|
||||
uart_data->rx_msg_buff = buf;
|
||||
uart_data->rx_msg_buff_size = size;
|
||||
esp_err_t res = esp_apptrace_send_uart_data(hw_data, (const char *)ptr, hw_data->tx_msg_buff_size, tmo);
|
||||
// Clear size to indicate that we've sent data
|
||||
hw_data->tx_msg_buff_size = 0;
|
||||
return res;
|
||||
}
|
||||
|
||||
static uint8_t *esp_apptrace_uart_down_buffer_get(void *hw_data, uint32_t *size, esp_apptrace_tmo_t *tmo)
|
||||
static void esp_apptrace_uart_down_buffer_config(esp_apptrace_uart_data_t *hw_data, uint8_t *buf, uint32_t size)
|
||||
{
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
if (!size || *size == 0) {
|
||||
hw_data->down_buffer = (uint8_t *)malloc(size);
|
||||
if (hw_data->down_buffer == NULL){
|
||||
assert(false && "Failed to allocate apptrace uart down buffer!");
|
||||
}
|
||||
hw_data->down_buffer_size = size;
|
||||
}
|
||||
|
||||
static uint8_t *esp_apptrace_uart_down_buffer_get(esp_apptrace_uart_data_t *hw_data, uint32_t *size, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
uint8_t *ptr = NULL;
|
||||
|
||||
if (*size > hw_data->down_buffer_size) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (!uart_data->rx_msg_buff) {
|
||||
ESP_APPTRACE_LOGE("RX message buffer is not configured. Call down_buffer_config() first.");
|
||||
esp_err_t res = esp_apptrace_uart_lock(hw_data, tmo);
|
||||
if (res != ESP_OK) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (esp_apptrace_uart_lock(uart_data, tmo) != ESP_OK) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
uint32_t rx_len = uart_ll_get_rxfifo_len(uart_data->hal_ctx.dev);
|
||||
int to_read = MIN(rx_len, MIN(uart_data->rx_msg_buff_size, *size));
|
||||
if (to_read) {
|
||||
uart_hal_read_rxfifo(&uart_data->hal_ctx, uart_data->rx_msg_buff, &to_read);
|
||||
}
|
||||
*size = to_read;
|
||||
|
||||
esp_apptrace_uart_unlock(uart_data);
|
||||
|
||||
return (*size > 0) ? uart_data->rx_msg_buff : NULL;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_down_buffer_put(void *hw_data, uint8_t *ptr, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
(void)hw_data;
|
||||
(void)ptr;
|
||||
(void)tmo;
|
||||
|
||||
/* No action needed - data was already read in get function */
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
static bool esp_apptrace_uart_host_is_connected(void *hw_data)
|
||||
{
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
|
||||
return uart_data->inited & 1;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_flush_nolock(void *hw_data, uint32_t min_sz, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
esp_apptrace_uart_rb_t *rb = &uart_data->tx_ring;
|
||||
|
||||
uint32_t pending = ring_buffer_data_len(rb);
|
||||
if (pending < min_sz) {
|
||||
ESP_APPTRACE_LOGD("Ignore UART flush request for min %" PRIu32 " bytes. Pending bytes: %" PRIu32, min_sz, pending);
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
/* Trigger transmission if there's data but not busy */
|
||||
if (pending > 0 && !uart_data->tx_busy) {
|
||||
uart_data->tx_busy = true;
|
||||
uart_ll_clr_intsts_mask(uart_data->hal_ctx.dev, UART_INTR_TXFIFO_EMPTY);
|
||||
uart_ll_ena_intr_mask(uart_data->hal_ctx.dev, UART_INTR_TXFIFO_EMPTY);
|
||||
}
|
||||
|
||||
while (uart_data->tx_busy || ring_buffer_data_len(rb) > 0) {
|
||||
if (esp_apptrace_tmo_check(tmo) != ESP_OK) {
|
||||
return ESP_ERR_TIMEOUT;
|
||||
size_t uart_fifolen = 0;
|
||||
uart_get_buffered_data_len(hw_data->port_num, &uart_fifolen);
|
||||
if (uart_fifolen > 0) {
|
||||
if (*size < uart_fifolen) {
|
||||
uart_fifolen = *size;
|
||||
}
|
||||
esp_rom_delay_us(100);
|
||||
*size = uart_fifolen;
|
||||
ptr = hw_data->down_buffer;
|
||||
*size =uart_read_bytes(hw_data->port_num, ptr, uart_fifolen, 0);
|
||||
}
|
||||
|
||||
if (esp_apptrace_uart_unlock(hw_data) != ESP_OK) {
|
||||
assert(false && "Failed to unlock apptrace data!");
|
||||
}
|
||||
return ptr;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_down_buffer_put(esp_apptrace_uart_data_t *hw_data, uint8_t *ptr, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_flush(void *hw_data, esp_apptrace_tmo_t *tmo)
|
||||
static bool esp_apptrace_uart_host_is_connected(esp_apptrace_uart_data_t *hw_data)
|
||||
{
|
||||
esp_apptrace_uart_data_t *uart_data = hw_data;
|
||||
esp_err_t res = esp_apptrace_uart_lock(uart_data, tmo);
|
||||
if (res != ESP_OK) {
|
||||
return res;
|
||||
}
|
||||
|
||||
esp_err_t ret = esp_apptrace_uart_flush_nolock(hw_data, 0, tmo);
|
||||
|
||||
esp_apptrace_uart_unlock(uart_data);
|
||||
|
||||
return ret;
|
||||
return hw_data->inited & 1;
|
||||
}
|
||||
|
||||
esp_apptrace_hw_t *esp_apptrace_uart_hw_get(int num, void **data)
|
||||
static esp_err_t esp_apptrace_uart_flush_nolock(esp_apptrace_uart_data_t *hw_data, uint32_t min_sz, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
ESP_APPTRACE_LOGD("esp_apptrace_uart_hw_get - %i", num);
|
||||
|
||||
static esp_apptrace_uart_data_t s_uart_hw_data;
|
||||
static esp_apptrace_hw_t s_uart_hw = {
|
||||
.init = esp_apptrace_uart_init,
|
||||
.get_up_buffer = esp_apptrace_uart_up_buffer_get,
|
||||
.put_up_buffer = esp_apptrace_uart_up_buffer_put,
|
||||
.flush_up_buffer_nolock = esp_apptrace_uart_flush_nolock,
|
||||
.flush_up_buffer = esp_apptrace_uart_flush,
|
||||
.down_buffer_config = esp_apptrace_uart_down_buffer_config,
|
||||
.get_down_buffer = esp_apptrace_uart_down_buffer_get,
|
||||
.put_down_buffer = esp_apptrace_uart_down_buffer_put,
|
||||
.host_is_connected = esp_apptrace_uart_host_is_connected,
|
||||
};
|
||||
*data = &s_uart_hw_data;
|
||||
return &s_uart_hw;
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
static esp_err_t esp_apptrace_uart_flush(esp_apptrace_uart_data_t *hw_data, esp_apptrace_tmo_t *tmo)
|
||||
{
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
#endif // APPTRACE_DEST_UART
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user