forked from espressif/esp-idf
Compare commits
414 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cbce221e88 | ||
|
|
c570f67461 | ||
|
|
2fb799c52e | ||
|
|
79e12a1840 | ||
|
|
3cad4ca937 | ||
|
|
e44a6888ea | ||
|
|
4bc762621d | ||
|
|
42592552b5 | ||
|
|
5a21bea309 | ||
|
|
cb88d20f9e | ||
|
|
d28cb4a954 | ||
|
|
c5e580e522 | ||
|
|
ddc16a47fe | ||
|
|
03832dbd5e | ||
|
|
ecf46dd5da | ||
|
|
e8d9104953 | ||
|
|
6cd76ba321 | ||
|
|
5a9cff0d34 | ||
|
|
aa741b90ad | ||
|
|
22adf838c9 | ||
|
|
f4f45345ee | ||
|
|
7b09d12c89 | ||
|
|
30f8f83d52 | ||
|
|
5cd24826bb | ||
|
|
0d13f6f09d | ||
|
|
871dd9e21a | ||
|
|
273d973489 | ||
|
|
8df4a852f5 | ||
|
|
bb4218285b | ||
|
|
ea5e0ff298 | ||
|
|
125d00c0f7 | ||
|
|
8c714acfb5 | ||
|
|
702afbb0f7 | ||
|
|
b0e2f33082 | ||
|
|
7eb9af5806 | ||
|
|
2016bddbda | ||
|
|
d9e289a5fb | ||
|
|
6ad6fb9755 | ||
|
|
56677dabe8 | ||
|
|
9310ed608e | ||
|
|
057cf2b8ee | ||
|
|
829fdd172c | ||
|
|
8cecbafe18 | ||
|
|
786faa3cac | ||
|
|
6477500cf1 | ||
|
|
3ef8c77588 | ||
|
|
9aedb4bd83 | ||
|
|
576489f8cf | ||
|
|
a32201f6ec | ||
|
|
774696285d | ||
|
|
d345beb02a | ||
|
|
cb6b14a0f8 | ||
|
|
eccb5318f9 | ||
|
|
1df2dcc9fe | ||
|
|
00e777aff9 | ||
|
|
267c5e37a2 | ||
|
|
2220a07ec1 | ||
|
|
a3ecb8fe93 | ||
|
|
d9de899ed7 | ||
|
|
871bbdcec3 | ||
|
|
050367ea37 | ||
|
|
8bfb5c837e | ||
|
|
7412d1a1a9 | ||
|
|
edf9f9eff7 | ||
|
|
fc5fe1132a | ||
|
|
ed32d7a267 | ||
|
|
0d12613ab9 | ||
|
|
cc7c851cb2 | ||
|
|
501c7d1101 | ||
|
|
05c98d8b53 | ||
|
|
19f300fa24 | ||
|
|
c58915842e | ||
|
|
6cc4ab56e4 | ||
|
|
bd0f9b8512 | ||
|
|
43a67a5da3 | ||
|
|
ed076c2bc8 | ||
|
|
7fcba0fbf8 | ||
|
|
8b7cd4dc08 | ||
|
|
57b31ed545 | ||
|
|
2010d70892 | ||
|
|
f503b0f621 | ||
|
|
5b4c95c50f | ||
|
|
79fab2eb92 | ||
|
|
f79088f115 | ||
|
|
38a1cfe59c | ||
|
|
0814386710 | ||
|
|
431f5b81a2 | ||
|
|
e8dba711ff | ||
|
|
bd5b1008de | ||
|
|
9f7475dd98 | ||
|
|
a6cbf68991 | ||
|
|
6ba1c6c44f | ||
|
|
82c6c8149c | ||
|
|
b3bde42d8c | ||
|
|
83ac726851 | ||
|
|
133e19d6ee | ||
|
|
8702e49057 | ||
|
|
4943844764 | ||
|
|
b525e273ce | ||
|
|
0ed8499898 | ||
|
|
0122794a4e | ||
|
|
c92ddbbf6f | ||
|
|
5bd2b5bc41 | ||
|
|
b311a3ffc2 | ||
|
|
8adb48cbfd | ||
|
|
e86b8136bd | ||
|
|
9b73003628 | ||
|
|
3bdbad6aa7 | ||
|
|
df7a27e36d | ||
|
|
b62cbebb81 | ||
|
|
c907f489d6 | ||
|
|
9b61156746 | ||
|
|
b8bd147466 | ||
|
|
e0b91b748e | ||
|
|
726ed144e4 | ||
|
|
accb45f466 | ||
|
|
c5003e6220 | ||
|
|
96e1f6e7a2 | ||
|
|
d17ab36645 | ||
|
|
a056e655d8 | ||
|
|
aed737b351 | ||
|
|
0edb7ee43a | ||
|
|
689db30956 | ||
|
|
59b6634a9a | ||
|
|
24ae831b3d | ||
|
|
78d88afbef | ||
|
|
b8365dced6 | ||
|
|
b7199e88b7 | ||
|
|
2701677eb6 | ||
|
|
2165ff386e | ||
|
|
79a2cc224c | ||
|
|
6cfc6f53be | ||
|
|
e35897db33 | ||
|
|
7e49268933 | ||
|
|
a7021c3e44 | ||
|
|
190e9e7212 | ||
|
|
fc69e53e40 | ||
|
|
6f459d4ea8 | ||
|
|
1d89e24199 | ||
|
|
2379ee36ab | ||
| 58fa57af93 | |||
|
|
b1c85cc1d5 | ||
|
|
cf5e2c3962 | ||
|
|
b77479df39 | ||
|
|
ac42a8f2c7 | ||
|
|
e6f018a309 | ||
|
|
2afb56d189 | ||
|
|
d751960b27 | ||
|
|
7ddb440384 | ||
|
|
a891aa7e4d | ||
|
|
d3d73ed8b2 | ||
|
|
a498871111 | ||
|
|
5d6b59109b | ||
|
|
48ab527148 | ||
|
|
51a9057d9e | ||
|
|
6ae2c3c240 | ||
|
|
8b8a6a4450 | ||
|
|
6b0d93efd4 | ||
|
|
d795abeb03 | ||
|
|
70feed14dd | ||
|
|
ae4d1c1f81 | ||
|
|
eec03e6a32 | ||
|
|
6b31235f3f | ||
|
|
2744e6ce07 | ||
|
|
b3c5ee767a | ||
|
|
ad5044c5f6 | ||
|
|
4a35536244 | ||
|
|
670996f484 | ||
|
|
8bb8c144c7 | ||
|
|
653ba59b23 | ||
|
|
319e0689a5 | ||
|
|
6b86fc7ad7 | ||
|
|
f4c5fdbd1a | ||
|
|
7b93cf91aa | ||
|
|
63153794c9 | ||
|
|
19a2e42770 | ||
|
|
45fd8feba3 | ||
|
|
f5020d3f1b | ||
|
|
08600cb1a3 | ||
|
|
236d601e98 | ||
|
|
ee18b19b8f | ||
|
|
27f044bd7f | ||
|
|
5a353ab1ca | ||
|
|
bac34f23bc | ||
|
|
8a08cfe7d1 | ||
|
|
242713ceee | ||
|
|
1d3a08d06f | ||
|
|
e6de764ca1 | ||
|
|
3948949019 | ||
|
|
24cc2d714e | ||
|
|
16696d98c1 | ||
|
|
daf2622a5b | ||
|
|
864f5532fa | ||
|
|
d4a3427eaf | ||
|
|
94d6614c88 | ||
|
|
de5a9ac1f3 | ||
|
|
46f104b3ae | ||
|
|
dd747bd54d | ||
|
|
7d96d9e306 | ||
|
|
1111fd2630 | ||
|
|
d12ad17373 | ||
|
|
4db113a2eb | ||
|
|
bba2581f1a | ||
|
|
938bcc0337 | ||
|
|
845efafc76 | ||
|
|
78b9f5e151 | ||
|
|
8dbe966d89 | ||
|
|
38570b052b | ||
|
|
4477f3e559 | ||
|
|
25e8069532 | ||
|
|
90e354a723 | ||
|
|
2004bf4e11 | ||
|
|
a0a6e34f4f | ||
|
|
6b5b7f09f9 | ||
|
|
42d5b865dd | ||
|
|
123da6baa7 | ||
|
|
2654d29bc5 | ||
|
|
b3b85cafb1 | ||
|
|
21fd9aaee8 | ||
|
|
8fd3b342aa | ||
|
|
1c1c6d59b3 | ||
|
|
f2d144166a | ||
|
|
e704f72356 | ||
|
|
4f1fc73fc2 | ||
|
|
2508350137 | ||
|
|
058bb0edd0 | ||
|
|
71d19fa9c9 | ||
|
|
07d1e19107 | ||
|
|
1042115566 | ||
|
|
b97cf6ca7c | ||
|
|
5775e1d3b7 | ||
|
|
4a74ae4921 | ||
|
|
5fac5b0191 | ||
|
|
abc099ce9f | ||
|
|
174ef6c4a3 | ||
|
|
89c06b3c51 | ||
|
|
62a5ad5fc1 | ||
|
|
c7ca30e62f | ||
|
|
6ff1059da7 | ||
|
|
d6a1ccb27f | ||
|
|
e575e0b44a | ||
|
|
cbd210b431 | ||
|
|
14dac35540 | ||
|
|
71a19d238c | ||
|
|
961018d882 | ||
|
|
c710a69952 | ||
|
|
eeea0a7ea8 | ||
|
|
5fdb9b00c5 | ||
|
|
310e5e71ed | ||
|
|
9a58988bae | ||
|
|
d89db7e4a7 | ||
|
|
c0c9227883 | ||
|
|
c49dce48eb | ||
|
|
1a03cb02c2 | ||
|
|
193f581cec | ||
|
|
f03e3c164e | ||
|
|
8d383980a1 | ||
|
|
202b18b5fa | ||
|
|
3048251be0 | ||
|
|
c35f7cad45 | ||
|
|
1b4e46428d | ||
|
|
5bba395bc8 | ||
|
|
8719a45d97 | ||
|
|
0e484e1daa | ||
|
|
0be6802198 | ||
|
|
9f80af3770 | ||
|
|
411c392d97 | ||
|
|
3724bf6256 | ||
|
|
a64cbdea10 | ||
|
|
728c1ffe59 | ||
|
|
2bc721739c | ||
|
|
34e0ed613a | ||
|
|
7492e0a2c7 | ||
|
|
ea65b1fefb | ||
|
|
a33c60a36f | ||
|
|
3178718a3d | ||
|
|
deedc51cf1 | ||
|
|
f7e3f76899 | ||
|
|
46511c8fd1 | ||
|
|
96b3f52c4e | ||
|
|
2117b7a1dc | ||
|
|
3ae079ac12 | ||
|
|
0bac174058 | ||
|
|
856cfa4ae2 | ||
|
|
0d13dbbb1f | ||
|
|
51c058d80f | ||
|
|
aea120b98b | ||
|
|
7909394274 | ||
|
|
6bfa2b7078 | ||
|
|
86a673946c | ||
|
|
aedcec9be5 | ||
|
|
934f0477a4 | ||
|
|
81988b0007 | ||
|
|
de45fb1c33 | ||
|
|
29be39086f | ||
|
|
934c2c3a58 | ||
|
|
0aad1efe15 | ||
|
|
87328d594f | ||
|
|
954a6a2cff | ||
|
|
cae47ce37e | ||
|
|
8f24b34f21 | ||
|
|
804a9ea1f6 | ||
|
|
46deef434f | ||
|
|
5ccf93d788 | ||
|
|
4c3c6b7f99 | ||
|
|
70fc0bde01 | ||
|
|
1564884cc1 | ||
|
|
86226770b8 | ||
|
|
e121775d6c | ||
|
|
ef3da6b372 | ||
|
|
1b6461b9f8 | ||
|
|
16f3317496 | ||
|
|
3118120659 | ||
|
|
e1b89eeae0 | ||
|
|
cfcb57333b | ||
|
|
24bfb8a8e5 | ||
|
|
f61e219667 | ||
|
|
40b0dbae10 | ||
|
|
ee4e5c014a | ||
|
|
4dcd055612 | ||
|
|
6b28967b64 | ||
|
|
f6e0867eb8 | ||
|
|
30cc0769ac | ||
|
|
f793584c68 | ||
|
|
3a34660d54 | ||
|
|
d54af90022 | ||
|
|
a0df46d342 | ||
|
|
959f92e744 | ||
|
|
e23ce8209d | ||
|
|
92965cd124 | ||
|
|
8c11edb852 | ||
|
|
f67a860cf0 | ||
|
|
9d114e30b5 | ||
|
|
e104fa1904 | ||
|
|
d7b344c97a | ||
|
|
5bea8592d9 | ||
|
|
0f8bf38913 | ||
|
|
e24e674e2f | ||
|
|
52ca53746f | ||
|
|
a26449844f | ||
|
|
d020a58be1 | ||
|
|
4f40bfd864 | ||
|
|
75002dce56 | ||
|
|
b8718506cd | ||
|
|
0116dcb578 | ||
|
|
3152dea192 | ||
|
|
31e42e77de | ||
|
|
f0b2bd5c81 | ||
|
|
83a682ca1a | ||
|
|
9ddd08c502 | ||
|
|
4e73cb77e5 | ||
|
|
f9ebbdf6ea | ||
|
|
39caffc592 | ||
|
|
ce5444d349 | ||
|
|
79dabf50b0 | ||
|
|
a4dbb3a0a1 | ||
|
|
8438887cb4 | ||
|
|
7a410499f3 | ||
|
|
c020a68e1e | ||
|
|
55d44e0bee | ||
|
|
5736694dbc | ||
|
|
6102cfdd27 | ||
|
|
5b18007d92 | ||
|
|
bead0d741b | ||
|
|
a218144f4c | ||
|
|
46fe70fb46 | ||
|
|
c5fe3ec05d | ||
|
|
3562cb8051 | ||
|
|
7b41d6004a | ||
|
|
5c88e0d801 | ||
|
|
c6559a9b64 | ||
|
|
ca3d871a21 | ||
|
|
113e4dc520 | ||
|
|
80315b77a0 | ||
|
|
aeabe8d742 | ||
|
|
1644050652 | ||
|
|
a12e124410 | ||
|
|
36d6a927d1 | ||
|
|
93eeb4265c | ||
|
|
f608431421 | ||
|
|
21c536c563 | ||
|
|
de33225c31 | ||
|
|
b07a534984 | ||
|
|
b0a1c0d045 | ||
|
|
73f895217b | ||
|
|
0fb6316888 | ||
|
|
be8727cf88 | ||
|
|
8b0860ef95 | ||
|
|
6bb4dc35ab | ||
|
|
34fea0d38f | ||
|
|
a6b1ebce31 | ||
|
|
b5357e8b01 | ||
|
|
59666637bc | ||
|
|
48c5f74cbc | ||
|
|
8e41186ac8 | ||
|
|
49718b20a5 | ||
|
|
f1a2bc777e | ||
|
|
0ed6610212 | ||
|
|
51405fd9d4 | ||
|
|
71786a7413 | ||
|
|
4102628a3b | ||
|
|
d0011b778a | ||
|
|
c2fe7be50e | ||
|
|
13e3480f03 | ||
|
|
024e201097 | ||
|
|
62467fbca5 | ||
|
|
84f81437a3 | ||
|
|
c00759ad34 | ||
|
|
472cc06f6c | ||
|
|
b5572b1db0 | ||
|
|
2b461df8a8 | ||
|
|
7a716377a3 | ||
|
|
965e9abd7b | ||
|
|
a47169f450 |
10
.github/ISSUE_TEMPLATE/02_runtime_bug.yml
vendored
10
.github/ISSUE_TEMPLATE/02_runtime_bug.yml
vendored
@@ -22,14 +22,6 @@ body:
|
||||
placeholder: ex. v3.2-dev-1148-g96cd3b75c
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: chip_revision
|
||||
attributes:
|
||||
label: Espressif SoC revision.
|
||||
description: On which Espressif SoC revision does your application run on? Run `esptool chip_id` to find it.
|
||||
placeholder: ex. ESP32-C3 (QFN32) (revision v0.3)
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: operating_system
|
||||
attributes:
|
||||
@@ -71,7 +63,7 @@ body:
|
||||
attributes:
|
||||
label: Development Kit.
|
||||
description: On which Development Kit does this issue occur on?
|
||||
placeholder: ex. ESP32-Wrover-Kit v2 | Custom Board | QEMU
|
||||
placeholder: ex. ESP32-Wrover-Kit v2 | Custom Board
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
|
||||
5
.github/dangerjs/.gitignore
vendored
5
.github/dangerjs/.gitignore
vendored
@@ -1,5 +0,0 @@
|
||||
# Transpiled JavaScript (if any)
|
||||
dist
|
||||
|
||||
# Installed dependencies
|
||||
node_modules
|
||||
47
.github/dangerjs/README.md
vendored
47
.github/dangerjs/README.md
vendored
@@ -1,47 +0,0 @@
|
||||
# DangerJS pull request automatic review tool - GitHub
|
||||
|
||||
## Implementation
|
||||
The main development is done in Espressif Gitlab project.
|
||||
Espressif [GitHub project espressif/esp-idf](https://github.com/espressif/esp-idf) is only a public mirror.
|
||||
|
||||
Therefore, all changes and updates to DangerJS files (`.github/dangerjs`) must be made via MR in the **Gitlab** repository by Espressif engineer.
|
||||
|
||||
When adding a new Danger rule or updating existing one, might be a good idea to test it on the developer's fork of GitHub project. This way, the new feature can be tested using a GitHub action without concern of damaging Espressif's GitHub repository.
|
||||
|
||||
Danger for Espressif GitHub is implemented in TypeScript. This makes the code more readable and robust than plain JavaScript.
|
||||
Compilation to JavaScript code (using `tsc`) is not necessary; Danger handles TypeScript natively.
|
||||
|
||||
A good practice is to store each Danger rule in a separate module, and then import these modules into the main Danger file `.github/dangerjs/dangerfile.ts` (see how this is done for currently present modules when adding a new one).
|
||||
|
||||
If the Danger module (new check/rule) uses an external NPM module (e.g. `axios`), be sure to add this dependency to `.github/dangerjs/package.json` and also update `.github/dangerjs/package-lock.json`.
|
||||
|
||||
In the GitHub action, `danger` is not installed globally (nor are its dependencies) and the `npx` call is used to start the `danger` checks in CI.
|
||||
|
||||
|
||||
## Adding new Danger rule
|
||||
For local development you can use following strategy
|
||||
|
||||
#### Install dependencies
|
||||
```sh
|
||||
cd .github/dangerjs
|
||||
npm install
|
||||
```
|
||||
(If the IDE still shows compiler/typing errors, reload the IDE window.)
|
||||
|
||||
#### Add new code as needed or make updates
|
||||
|
||||
#### Test locally
|
||||
Danger rules can be tested locally (without running the GitHub action pipeline).
|
||||
To do this, you have to first export the ENV variables used by Danger in the local terminal:
|
||||
|
||||
```sh
|
||||
export GITHUB_TOKEN='**************************************'
|
||||
```
|
||||
|
||||
Then you can call Danger by:
|
||||
```sh
|
||||
cd .github/dangerjs
|
||||
|
||||
danger pr https://github.com/espressif/esp-idf/pull/<number_of_pull_request>
|
||||
```
|
||||
The result will be displayed in your terminal.
|
||||
48
.github/dangerjs/dangerfile.ts
vendored
48
.github/dangerjs/dangerfile.ts
vendored
@@ -1,48 +0,0 @@
|
||||
import { DangerResults } from "danger";
|
||||
declare const results: DangerResults;
|
||||
declare const message: (message: string, results?: DangerResults) => void;
|
||||
declare const markdown: (message: string, results?: DangerResults) => void;
|
||||
|
||||
// Import modules with danger rules
|
||||
// (Modules with checks are stored in ".github/dangerjs/<module_name>.ts". To import them, use path relative to "dangerfile.ts")
|
||||
import prCommitsTooManyCommits from "./prCommitsTooManyCommits";
|
||||
import prDescription from "./prDescription";
|
||||
import prTargetBranch from "./prTargetBranch";
|
||||
import prInfoContributor from "./prInfoContributor";
|
||||
import prCommitMessage from "./prCommitMessage";
|
||||
|
||||
async function runDangerRules(): Promise<void> {
|
||||
// Message to contributor about review and merge process
|
||||
const prInfoContributorMessage: string = await prInfoContributor();
|
||||
markdown(prInfoContributorMessage);
|
||||
|
||||
// Run danger checks
|
||||
prCommitsTooManyCommits();
|
||||
prDescription();
|
||||
prTargetBranch();
|
||||
prCommitMessage();
|
||||
|
||||
// Add success log if no issues
|
||||
const dangerFails: number = results.fails.length;
|
||||
const dangerWarns: number = results.warnings.length;
|
||||
const dangerInfos: number = results.messages.length;
|
||||
if (!dangerFails && !dangerWarns && !dangerInfos) {
|
||||
return message("Good Job! All checks are passing!");
|
||||
}
|
||||
|
||||
// Add retry link
|
||||
addRetryLink();
|
||||
}
|
||||
|
||||
runDangerRules();
|
||||
|
||||
function addRetryLink(): void {
|
||||
const serverUrl: string | undefined = process.env.GITHUB_SERVER_URL;
|
||||
const repoName: string | undefined = process.env.GITHUB_REPOSITORY;
|
||||
const runId: string | undefined = process.env.GITHUB_RUN_ID;
|
||||
|
||||
const retryLinkUrl: string = `${serverUrl}/${repoName}/actions/runs/${runId}`;
|
||||
const retryLink: string = `<sub>:repeat: You can re-run automatic PR checks by retrying the <a href="${retryLinkUrl}">DangerJS action</a></sub>`;
|
||||
|
||||
markdown(retryLink);
|
||||
}
|
||||
1999
.github/dangerjs/package-lock.json
generated
vendored
1999
.github/dangerjs/package-lock.json
generated
vendored
File diff suppressed because it is too large
Load Diff
18
.github/dangerjs/package.json
vendored
18
.github/dangerjs/package.json
vendored
@@ -1,18 +0,0 @@
|
||||
{
|
||||
"name": "dangerjs-github",
|
||||
"description": "GitHub PR reviewing with DangerJS",
|
||||
"main": "dangerfile.ts",
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"axios": "^1.3.3",
|
||||
"danger": "^11.2.3",
|
||||
"request": "^2.88.2",
|
||||
"sync-request": "^6.1.0",
|
||||
"typescript": "^5.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^18.15.11"
|
||||
}
|
||||
}
|
||||
67
.github/dangerjs/prCommitMessage.ts
vendored
67
.github/dangerjs/prCommitMessage.ts
vendored
@@ -1,67 +0,0 @@
|
||||
import { DangerDSLType, DangerResults } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
declare const warn: (message: string, results?: DangerResults) => void;
|
||||
|
||||
interface Commit {
|
||||
message: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if commit messages are sufficiently descriptive (not too short).
|
||||
*
|
||||
* Search for commit messages that appear to be automatically generated or temporary messages and report them.
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
export default function (): void {
|
||||
const prCommits: Commit[] = danger.git.commits;
|
||||
|
||||
const detectRegexes: RegExp[] = [
|
||||
/^Merge pull request #\d+ from .*/i, // Automatically generated message by GitHub
|
||||
/^Merged .+:.+ into .+/i, // Automatically generated message by GitHub
|
||||
/^Automatic merge by GitHub Action/i, // Automatically generated message by GitHub
|
||||
/^Merge branch '.*' of .+ into .+/i, // Automatically generated message by GitHub
|
||||
/^Create\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
|
||||
/^Delete\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
|
||||
/^Update\s[a-zA-Z0-9_.-]+(\.[a-zA-Z0-9]{1,4})?(?=\s|$)/, // Automatically generated message by GitHub using UI
|
||||
/^Initial commit/i, // Automatically generated message by GitHub
|
||||
/^WIP.*/i, // Message starts with prefix "WIP"
|
||||
/^Cleaned.*/i, // Message starts "Cleaned", , probably temporary
|
||||
/^Test:.*/i, // Message starts with "test" prefix, probably temporary
|
||||
/clean ?up/i, // Message contains "clean up", probably temporary
|
||||
/^[^A-Za-z0-9\s].*/, // Message starts with special characters
|
||||
];
|
||||
|
||||
let partMessages: string[] = [];
|
||||
|
||||
for (const commit of prCommits) {
|
||||
const commitMessage: string = commit.message;
|
||||
const commitMessageTitle: string = commit.message.split("\n")[0];
|
||||
|
||||
// Check if the commit message matches any regex from "detectRegexes"
|
||||
if (detectRegexes.some((regex) => commitMessage.match(regex))) {
|
||||
partMessages.push(
|
||||
`- the commit message \`${commitMessageTitle}\` appears to be a temporary or automatically generated message`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if the commit message is not too short
|
||||
const shortCommitMessageThreshold: number = 20; // commit message is considered too short below this number of characters
|
||||
if (commitMessage.length < shortCommitMessageThreshold) {
|
||||
partMessages.push(
|
||||
`- the commit message \`${commitMessageTitle}\` may not be sufficiently descriptive`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Create report
|
||||
if (partMessages.length) {
|
||||
partMessages.sort();
|
||||
let dangerMessage = `\nSome issues found for the commit messages in this MR:\n${partMessages.join(
|
||||
"\n"
|
||||
)}
|
||||
\nPlease consider updating these commit messages.`;
|
||||
warn(dangerMessage);
|
||||
}
|
||||
}
|
||||
19
.github/dangerjs/prCommitsTooManyCommits.ts
vendored
19
.github/dangerjs/prCommitsTooManyCommits.ts
vendored
@@ -1,19 +0,0 @@
|
||||
import { DangerDSLType, DangerResults } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
declare const message: (message: string, results?: DangerResults) => void;
|
||||
|
||||
/**
|
||||
* Check if pull request has not an excessive numbers of commits (if squashed)
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
export default function (): void {
|
||||
const tooManyCommitThreshold: number = 2; // above this number of commits, squash commits is suggested
|
||||
const prCommits: number = danger.github.commits.length;
|
||||
|
||||
if (prCommits > tooManyCommitThreshold) {
|
||||
return message(
|
||||
`You might consider squashing your ${prCommits} commits (simplifying branch history).`
|
||||
);
|
||||
}
|
||||
}
|
||||
19
.github/dangerjs/prDescription.ts
vendored
19
.github/dangerjs/prDescription.ts
vendored
@@ -1,19 +0,0 @@
|
||||
import { DangerDSLType, DangerResults } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
declare const warn: (message: string, results?: DangerResults) => void;
|
||||
|
||||
/**
|
||||
* Check if pull request has has a sufficiently accurate description
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
export default function (): void {
|
||||
const prDescription: string = danger.github.pr.body;
|
||||
const shortPrDescriptionThreshold: number = 100; // Description is considered too short below this number of characters
|
||||
|
||||
if (prDescription.length < shortPrDescriptionThreshold) {
|
||||
return warn(
|
||||
"The PR description looks very brief, please check if more details can be added."
|
||||
);
|
||||
}
|
||||
}
|
||||
58
.github/dangerjs/prInfoContributor.ts
vendored
58
.github/dangerjs/prInfoContributor.ts
vendored
@@ -1,58 +0,0 @@
|
||||
import { DangerDSLType } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
|
||||
interface Contributor {
|
||||
login?: string;
|
||||
}
|
||||
|
||||
const authorLogin = danger.github.pr.user.login;
|
||||
const messageKnownContributor: string = `
|
||||
***
|
||||
👋 **Hi ${authorLogin}**, thank you for your another contribution to \`espressif/esp-idf\` project!
|
||||
|
||||
If the change is approved and passes the tests in our internal git repository, it will appear in this public Github repository on the next sync.
|
||||
***
|
||||
`;
|
||||
|
||||
const messageFirstContributor: string = `
|
||||
***
|
||||
👋 **Welcome ${authorLogin}**, thank you for your first contribution to \`espressif/esp-idf\` project!
|
||||
|
||||
📘 Please check [Contributions Guide](https://docs.espressif.com/projects/esp-idf/en/latest/esp32/contribute/index.html#contributions-guide) for the contribution checklist, information regarding code and documentation style, testing and other topics.
|
||||
|
||||
🖊️ Please also make sure you have **read and signed** the [Contributor License Agreement for espressif/esp-idf project](https://cla-assistant.io/espressif/esp-idf).
|
||||
|
||||
#### Pull request review and merge process you can expect
|
||||
Espressif develops the ESP-IDF project in an internal repository (Gitlab). We do welcome contributions in the form of bug reports, feature requests and pull requests via this public GitHub repository.
|
||||
|
||||
1. An internal issue has been created for the PR, we assign it to the relevant engineer
|
||||
2. They review the PR and either approve it or ask you for changes or clarifications
|
||||
3. Once the Github PR is approved, we synchronize it into our internal git repository
|
||||
4. In the internal git repository we do the final review, collect approvals from core owners and make sure all the automated tests are passing
|
||||
- At this point we may do some adjustments to the proposed change, or extend it by adding tests or documentation.
|
||||
5. If the change is approved and passes the tests it is merged into the \`master\` branch
|
||||
6. On next sync from the internal git repository merged change will appear in this public Github repository
|
||||
|
||||
***
|
||||
`;
|
||||
|
||||
/**
|
||||
* Check whether the author of the pull request is known or a first-time contributor, and add a message to the PR with information about the review and merge process.
|
||||
*/
|
||||
export default async function (): Promise<string> {
|
||||
const contributors = await danger.github.api.repos.listContributors({
|
||||
owner: danger.github.thisPR.owner,
|
||||
repo: danger.github.thisPR.repo,
|
||||
});
|
||||
|
||||
const contributorsData: Contributor[] = contributors.data;
|
||||
const knownContributors: (string | undefined)[] = contributorsData.map(
|
||||
(contributor: Contributor) => contributor.login
|
||||
);
|
||||
|
||||
if (knownContributors.includes(authorLogin)) {
|
||||
return messageKnownContributor;
|
||||
} else {
|
||||
return messageFirstContributor;
|
||||
}
|
||||
}
|
||||
19
.github/dangerjs/prTargetBranch.ts
vendored
19
.github/dangerjs/prTargetBranch.ts
vendored
@@ -1,19 +0,0 @@
|
||||
import { DangerDSLType, DangerResults } from "danger";
|
||||
declare const danger: DangerDSLType;
|
||||
declare const fail: (message: string, results?: DangerResults) => void;
|
||||
|
||||
/**
|
||||
* Check if the target branch is "master"
|
||||
*
|
||||
* @dangerjs FAIL
|
||||
*/
|
||||
export default function (): void {
|
||||
const prTargetBranch: string = danger.github?.pr?.base?.ref;
|
||||
|
||||
if (prTargetBranch !== "master") {
|
||||
return fail(`
|
||||
The target branch for this pull request should be \`master\`.\n
|
||||
If you would like to add this feature to the release branch, please state this in the PR description and we will consider backporting it.
|
||||
`);
|
||||
}
|
||||
}
|
||||
17
.github/dangerjs/tsconfig.json
vendored
17
.github/dangerjs/tsconfig.json
vendored
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"esModuleInterop": true,
|
||||
"target": "es6",
|
||||
"noImplicitAny": true,
|
||||
"noUnusedParameters": true,
|
||||
"strictNullChecks": true,
|
||||
"sourceMap": true,
|
||||
"removeComments": true,
|
||||
"outDir": "./dist"
|
||||
},
|
||||
"include": [
|
||||
"./*.ts"
|
||||
]
|
||||
}
|
||||
15
.github/dependabot.yml
vendored
15
.github/dependabot.yml
vendored
@@ -1,15 +0,0 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "all"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
ignore:
|
||||
- directory: ".gitlab/dangerjs"
|
||||
patterns:
|
||||
- "package-lock.json"
|
||||
- directory: ".github/dangerjs"
|
||||
patterns:
|
||||
- "package-lock.json"
|
||||
# Disable "version updates" (keep only "security updates")
|
||||
open-pull-requests-limit: 0
|
||||
36
.github/workflows/dangerjs.yml
vendored
36
.github/workflows/dangerjs.yml
vendored
@@ -1,36 +0,0 @@
|
||||
name: DangerJS Pull Request review
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, edited, reopened, synchronize]
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
danger-check:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: .github/dangerjs
|
||||
steps:
|
||||
- name: Check out PR head
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Setup NodeJS environment
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 18
|
||||
cache: npm
|
||||
cache-dependency-path: .github/dangerjs/package-lock.json
|
||||
|
||||
- name: Install DangerJS dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Run DangerJS
|
||||
run: npx danger ci --failOnErrors -v
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
4
.github/workflows/pre_commit_check.yml
vendored
4
.github/workflows/pre_commit_check.yml
vendored
@@ -10,8 +10,6 @@ permissions:
|
||||
jobs:
|
||||
pre_commit_check:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
SKIP: "cleanup-ignore-lists" # Comma-separated string of ignored pre-commit check IDs
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
@@ -23,7 +21,7 @@ jobs:
|
||||
- name: Set up Python environment
|
||||
uses: actions/setup-python@master
|
||||
with:
|
||||
python-version: v3.8
|
||||
python-version: v3.7
|
||||
- name: Install python packages
|
||||
run: |
|
||||
pip install pre-commit
|
||||
|
||||
12
.gitignore
vendored
12
.gitignore
vendored
@@ -78,10 +78,6 @@ test_multi_heap_host
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# Sublime Text files
|
||||
*.sublime-project
|
||||
*.sublime-workspace
|
||||
|
||||
# Clion IDE CMake build & config
|
||||
.idea/
|
||||
cmake-build-*/
|
||||
@@ -101,11 +97,3 @@ managed_components
|
||||
|
||||
# pytest log
|
||||
pytest_embedded_log/
|
||||
list_job_*.txt
|
||||
size_info.txt
|
||||
|
||||
# clang config (for LSP)
|
||||
.clangd
|
||||
|
||||
# Vale
|
||||
.vale/styles/*
|
||||
|
||||
226
.gitlab-ci.yml
226
.gitlab-ci.yml
@@ -1,31 +1,241 @@
|
||||
stages:
|
||||
- upload_cache
|
||||
- pre_check
|
||||
- build
|
||||
- assign_test
|
||||
- build_doc
|
||||
- target_test
|
||||
- host_test
|
||||
- test_deploy
|
||||
- deploy
|
||||
- post_deploy
|
||||
|
||||
workflow:
|
||||
rules:
|
||||
# Disable those non-protected push triggered pipelines
|
||||
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_COMMIT_TAG !~ /^qa-test/ && $CI_PIPELINE_SOURCE == "push"'
|
||||
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_PIPELINE_SOURCE == "push"'
|
||||
when: never
|
||||
# when running merged result pipelines, CI_COMMIT_SHA represents the temp commit it created.
|
||||
# Please use PIPELINE_COMMIT_SHA at all places that require a commit sha of the original commit.
|
||||
# when running merged result pipelines, it would create a temp commit id. use $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA instead of $CI_COMMIT_SHA.
|
||||
# Please use PIPELINE_COMMIT_SHA at all places that require a commit sha
|
||||
- if: $CI_OPEN_MERGE_REQUESTS != null
|
||||
variables:
|
||||
PIPELINE_COMMIT_SHA: $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA
|
||||
IS_MR_PIPELINE: 1
|
||||
- if: $CI_OPEN_MERGE_REQUESTS == null
|
||||
variables:
|
||||
PIPELINE_COMMIT_SHA: $CI_COMMIT_SHA
|
||||
IS_MR_PIPELINE: 0
|
||||
- when: always
|
||||
|
||||
# Place the default settings in `.gitlab/ci/common.yml` instead
|
||||
variables:
|
||||
# System environment
|
||||
|
||||
# Common parameters for the 'make' during CI tests
|
||||
MAKEFLAGS: "-j5 --no-keep-going"
|
||||
|
||||
# GitLab-CI environment
|
||||
|
||||
# XXX_ATTEMPTS variables (https://docs.gitlab.com/ce/ci/yaml/README.html#job-stages-attempts) are not defined here.
|
||||
# Use values from "CI / CD Settings" - "Variables".
|
||||
|
||||
# GIT_STRATEGY is not defined here.
|
||||
# Use an option from "CI / CD Settings" - "General pipelines".
|
||||
|
||||
# we will download archive for each submodule instead of clone.
|
||||
# we don't do "recursive" when fetch submodule as they're not used in CI now.
|
||||
GIT_SUBMODULE_STRATEGY: none
|
||||
SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
|
||||
# by default we will fetch all submodules
|
||||
# jobs can overwrite this variable to only fetch submodules they required
|
||||
# set to "none" if don't need to fetch submodules
|
||||
SUBMODULES_TO_FETCH: "all"
|
||||
# tell build system do not check submodule update as we download archive instead of clone
|
||||
IDF_SKIP_CHECK_SUBMODULES: 1
|
||||
|
||||
IDF_PATH: "$CI_PROJECT_DIR"
|
||||
BATCH_BUILD: "1"
|
||||
V: "0"
|
||||
CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
|
||||
PYTHON_VER: 3.7.10
|
||||
|
||||
CLANG_TIDY_RUNNER_PROJ: 2107 # idf/clang-tidy-runner
|
||||
IDF_BUILD_APPS_PROJ: 2818 # espressif/idf-build-apps
|
||||
|
||||
# Docker images
|
||||
BOT_DOCKER_IMAGE_TAG: ":latest"
|
||||
|
||||
ESP_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-env-v5.1:1"
|
||||
CLANG_STATIC_ANALYSIS_IMAGE: "${CI_DOCKER_REGISTRY}/clang-static-analysis-v5.1:1-1"
|
||||
ESP_IDF_DOC_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-doc-env-v5.1:1-1"
|
||||
QEMU_IMAGE: "${CI_DOCKER_REGISTRY}/qemu-v5.1:1-20220802"
|
||||
TARGET_TEST_ENV_IMAGE: "$CI_DOCKER_REGISTRY/target-test-env-v5.1:1"
|
||||
|
||||
SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:3"
|
||||
|
||||
PRE_COMMIT_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-pre-commit:1"
|
||||
|
||||
# target test config file, used by assign test job
|
||||
CI_TARGET_TEST_CONFIG_FILE: "$CI_PROJECT_DIR/.gitlab/ci/target-test.yml"
|
||||
|
||||
# target test repo parameters
|
||||
TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
|
||||
|
||||
# cache python dependencies
|
||||
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
|
||||
|
||||
# Set this variable to the branch of idf-constraints repo in order to test a custom Python constraint file. The
|
||||
# branch name must be without the remote part ("origin/"). Keep the variable empty in order to use the constraint
|
||||
# file from https://dl.espressif.com/dl/esp-idf.
|
||||
CI_PYTHON_CONSTRAINT_BRANCH: ""
|
||||
|
||||
# Update the filename for a specific ESP-IDF release. It is used only with CI_PYTHON_CONSTRAINT_BRANCH.
|
||||
CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v5.1.txt"
|
||||
|
||||
# Set this variable to repository name of a Python tool you wish to install and test in the context of ESP-IDF CI.
|
||||
# Keep the variable empty when not used.
|
||||
CI_PYTHON_TOOL_REPO: ""
|
||||
|
||||
# Set this variable to the branch of a Python tool repo specified in CI_PYTHON_TOOL_REPO. The
|
||||
# branch name must be without the remote part ("origin/"). Keep the variable empty when not used.
|
||||
# This is used only if CI_PYTHON_TOOL_REPO is not empty.
|
||||
CI_PYTHON_TOOL_BRANCH: ""
|
||||
|
||||
IDF_CI_BUILD: 1
|
||||
|
||||
cache:
|
||||
# pull only for most of the use cases since it's cache dir.
|
||||
# Only set "push" policy for "upload_cache" stage jobs
|
||||
- key: pip-cache
|
||||
paths:
|
||||
- .cache/pip
|
||||
policy: pull
|
||||
- key: submodule-cache
|
||||
paths:
|
||||
- .cache/submodule_archives
|
||||
policy: pull
|
||||
|
||||
.common_before_scripts: &common-before_scripts |
|
||||
source tools/ci/utils.sh
|
||||
is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
|
||||
|
||||
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
|
||||
export IDF_MIRROR_PREFIX_MAP=
|
||||
fi
|
||||
|
||||
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "^([^,\n\r]+,)*include_nightly_run(,[^,\n\r]+)*$"; then
|
||||
export INCLUDE_NIGHTLY_RUN="1"
|
||||
fi
|
||||
|
||||
# configure cmake related flags
|
||||
source tools/ci/configure_ci_environment.sh
|
||||
|
||||
# add extra python packages
|
||||
export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
|
||||
|
||||
.setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
|
||||
# must use after setup_tools_except_target_test
|
||||
# otherwise the export.sh won't work properly
|
||||
|
||||
# download constraint file for dev
|
||||
if [[ -n "$CI_PYTHON_CONSTRAINT_BRANCH" ]]; then
|
||||
wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" ${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}
|
||||
mkdir -p ~/.espressif
|
||||
mv /tmp/constraint.txt ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
|
||||
fi
|
||||
|
||||
# Mirror
|
||||
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
|
||||
export IDF_MIRROR_PREFIX_MAP=
|
||||
fi
|
||||
|
||||
# install latest python packages
|
||||
# target test jobs
|
||||
if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
|
||||
# ttfw jobs
|
||||
if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
|
||||
run_cmd bash install.sh --enable-ci --enable-ttfw
|
||||
else
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
fi
|
||||
elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
|
||||
run_cmd bash install.sh --enable-ci --enable-docs
|
||||
elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
else
|
||||
if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
|
||||
run_cmd bash install.sh --enable-ci
|
||||
else
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
fi
|
||||
fi
|
||||
|
||||
# Install esp-clang if necessary
|
||||
if [[ "$IDF_TOOLCHAIN" == "clang" ]]; then
|
||||
$IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
|
||||
fi
|
||||
|
||||
source ./export.sh
|
||||
|
||||
# Custom OpenOCD
|
||||
if [[ ! -z "$OOCD_DISTRO_URL" && "$CI_JOB_STAGE" == "target_test" ]]; then
|
||||
echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
|
||||
wget $OOCD_DISTRO_URL
|
||||
ARCH_NAME=$(basename $OOCD_DISTRO_URL)
|
||||
tar -x -f $ARCH_NAME
|
||||
export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
|
||||
export PATH=$PWD/openocd-esp32/bin:$PATH
|
||||
fi
|
||||
|
||||
if [[ -n "$CI_PYTHON_TOOL_REPO" ]]; then
|
||||
git clone --quiet --depth=1 -b ${CI_PYTHON_TOOL_BRANCH} https://gitlab-ci-token:${ESPCI_TOKEN}@${GITLAB_HTTPS_HOST}/espressif/${CI_PYTHON_TOOL_REPO}.git
|
||||
pip install ./${CI_PYTHON_TOOL_REPO}
|
||||
rm -rf ${CI_PYTHON_TOOL_REPO}
|
||||
fi
|
||||
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
- fetch_submodules
|
||||
|
||||
.before_script_minimal:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
|
||||
.before_script_macos:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
# On macOS, these tools need to be installed
|
||||
- export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
|
||||
- $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
|
||||
# This adds tools (compilers) and the version-specific Python environment to PATH
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- fetch_submodules
|
||||
|
||||
.before_script_build_jobs:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
- fetch_submodules
|
||||
- export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
|
||||
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
|
||||
|
||||
default:
|
||||
retry:
|
||||
max: 2
|
||||
when:
|
||||
# In case of a runner failure we could hop to another one, or a network error could go away.
|
||||
- runner_system_failure
|
||||
# Job execution timeout may be caused by a network issue.
|
||||
- job_execution_timeout
|
||||
|
||||
include:
|
||||
- '.gitlab/ci/common.yml'
|
||||
- '.gitlab/ci/rules.yml'
|
||||
- '.gitlab/ci/upload_cache.yml'
|
||||
- '.gitlab/ci/docs.yml'
|
||||
- '.gitlab/ci/static-code-analysis.yml'
|
||||
- '.gitlab/ci/pre_check.yml'
|
||||
- '.gitlab/ci/build.yml'
|
||||
- '.gitlab/ci/integration_test.yml'
|
||||
- '.gitlab/ci/assign-test.yml'
|
||||
- '.gitlab/ci/host-test.yml'
|
||||
- '.gitlab/ci/target-test.yml'
|
||||
- '.gitlab/ci/deploy.yml'
|
||||
|
||||
@@ -21,14 +21,11 @@
|
||||
- [Shell Script Related](#shell-script-related)
|
||||
- [Manifest File to Control the Build/Test apps](#manifest-file-to-control-the-buildtest-apps)
|
||||
- [Grammar](#grammar)
|
||||
- [Special Rules](#special-rules)
|
||||
- [Upload/Download Artifacts to Internal Minio Server](#uploaddownload-artifacts-to-internal-minio-server)
|
||||
- [Users Without Access to Minio](#users-without-access-to-minio)
|
||||
- [Users With Access to Minio](#users-with-access-to-minio)
|
||||
- [Env Vars for Minio](#env-vars-for-minio)
|
||||
- [Artifacts Types and File Patterns](#artifacts-types-and-file-patterns)
|
||||
- [Upload](#upload)
|
||||
- [Download](#download)
|
||||
- [Operands](#operands)
|
||||
- [Operators](#operators)
|
||||
- [Limitation:](#limitation)
|
||||
- [How does it work?](#how-does-it-work)
|
||||
- [Example](#example)
|
||||
|
||||
## General Workflow
|
||||
|
||||
@@ -59,17 +56,19 @@
|
||||
- `example_test[_esp32/esp32s2/...]`
|
||||
- `fuzzer_test`
|
||||
- `host_test`
|
||||
- `integration_test`
|
||||
- `integration_test[_wifi/ble]`
|
||||
- `iperf_stress_test`
|
||||
- `macos`
|
||||
- `macos_test`
|
||||
- `nvs_coverage`
|
||||
- `submodule`
|
||||
- `unit_test[_esp32/esp32s2/...]`
|
||||
- `weekend_test`
|
||||
- `windows`
|
||||
|
||||
There are two general labels (not recommended since these two labels will trigger a lot of jobs)
|
||||
|
||||
- `target_test`: includes all target for `example_test`, `custom_test`, `component_ut`, `integration_test`
|
||||
- `target_test`: includes all target for `example_test`, `custom_test`, `component_ut`, `unit_test`, `integration_test`
|
||||
- `all_test`: includes all test labels
|
||||
|
||||
### How to trigger a `detached` pipeline without pushing new commits?
|
||||
@@ -145,11 +144,10 @@ check if there's a suitable `.if-<if-anchor-you-need>` anchor
|
||||
1. if there is, create a rule following [`rules` Template Naming Rules](#rules-template-naming-rules).For detail information, please refer to [GitLab Documentation `rules-if`](https://docs.gitlab.com/ee/ci/yaml/README.html#rulesif). Here's an example.
|
||||
|
||||
```yaml
|
||||
.rules:patterns:python-files:
|
||||
.rules:dev:
|
||||
rules:
|
||||
- <<: *if-protected
|
||||
- <<: *if-trigger
|
||||
- <<: *if-dev-push
|
||||
changes: *patterns-python-files
|
||||
```
|
||||
|
||||
2. if there isn't
|
||||
@@ -198,7 +196,7 @@ if a name has multi phrases, use `-` to concatenate them.
|
||||
|
||||
- `target_test`
|
||||
|
||||
a combination of `example_test`, `custom_test`, `component_ut`, `integration_test` and all targets
|
||||
a combination of `example_test`, `custom_test`, `unit_test`, `component_ut`, `integration_test` and all targets
|
||||
|
||||
#### `rules` Template Naming Rules
|
||||
|
||||
@@ -244,82 +242,3 @@ We're using the latest version of [idf-build-apps][idf-build-apps]. Please refer
|
||||
|
||||
[idf-build-apps]: https://github.com/espressif/idf-build-apps
|
||||
[manifest-doc]: https://docs.espressif.com/projects/idf-build-apps/en/latest/manifest.html
|
||||
|
||||
### Special Rules
|
||||
|
||||
In ESP-IDF CI, there's a few more special rules are additionally supported to disable the check app dependencies feature:
|
||||
|
||||
- Add MR labels `BUILD_AND_TEST_ALL_APPS`
|
||||
- Run in protected branches
|
||||
|
||||
## Upload/Download Artifacts to Internal Minio Server
|
||||
|
||||
### Users Without Access to Minio
|
||||
|
||||
If you don't have access to the internal Minio server, you can still download the artifacts from the shared link in the job log.
|
||||
|
||||
The log will look like this:
|
||||
|
||||
```shell
|
||||
Pipeline ID : 587355
|
||||
Job name : build_clang_test_apps_esp32
|
||||
Job ID : 40272275
|
||||
Created archive file: 40272275.zip, uploading as 587355/build_dir_without_map_and_elf_files/build_clang_test_apps_esp32/40272275.zip
|
||||
Please download the archive file includes build_dir_without_map_and_elf_files from [INTERNAL_URL]
|
||||
```
|
||||
|
||||
### Users With Access to Minio
|
||||
|
||||
#### Env Vars for Minio
|
||||
|
||||
Minio takes these env vars to connect to the server:
|
||||
|
||||
- `IDF_S3_SERVER`
|
||||
- `IDF_S3_ACCESS_KEY`
|
||||
- `IDF_S3_SECRET_KEY`
|
||||
- `IDF_S3_BUCKET`
|
||||
|
||||
#### Artifacts Types and File Patterns
|
||||
|
||||
The artifacts types and corresponding file patterns are defined in tools/ci/artifacts_handler.py, inside `ArtifactType` and `TYPE_PATTERNS_DICT`.
|
||||
|
||||
#### Upload
|
||||
|
||||
```shell
|
||||
python tools/ci/artifacts_handler.py upload
|
||||
```
|
||||
|
||||
will upload the files that match the file patterns to minio object storage with name:
|
||||
|
||||
`<pipeline_id>/<artifact_type>/<job_name>/<job_id>.zip`
|
||||
|
||||
For example, job 39043328 will upload these four files:
|
||||
|
||||
- `575500/map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
|
||||
- `575500/build_dir_without_map_and_elf_files/build_pytest_examples_esp32/39043328.zip`
|
||||
- `575500/logs/build_pytest_examples_esp32/39043328.zip`
|
||||
- `575500/size_reports/build_pytest_examples_esp32/39043328.zip`
|
||||
|
||||
#### Download
|
||||
|
||||
You may run
|
||||
|
||||
```shell
|
||||
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id>
|
||||
```
|
||||
|
||||
to download all files of the pipeline, or
|
||||
|
||||
```shell
|
||||
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id> --job_name <job_name_or_pattern>
|
||||
```
|
||||
|
||||
to download all files with the specified job name or pattern, or
|
||||
|
||||
```shell
|
||||
python tools/ci/artifacts_handler.py download --pipeline_id <pipeline_id> --job_name <job_name_or_pattern> --type <artifact_type> <artifact_type> ...
|
||||
```
|
||||
|
||||
to download all files with the specified job name or pattern and artifact type(s).
|
||||
|
||||
You may check all detailed documentation with `python tools/ci/artifacts_handler.py download -h`
|
||||
|
||||
99
.gitlab/ci/assign-test.yml
Normal file
99
.gitlab/ci/assign-test.yml
Normal file
@@ -0,0 +1,99 @@
|
||||
.assign_test_template:
|
||||
image: $TARGET_TEST_ENV_IMAGE
|
||||
stage: assign_test
|
||||
tags:
|
||||
- assign_test
|
||||
variables:
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
artifacts:
|
||||
paths:
|
||||
- ${TEST_DIR}/test_configs/
|
||||
- artifact_index.json
|
||||
when: always
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- run_cmd python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py $TEST_TYPE $TEST_DIR -c $CI_TARGET_TEST_CONFIG_FILE -o $TEST_DIR/test_configs
|
||||
|
||||
assign_example_test:
|
||||
extends:
|
||||
- .assign_test_template
|
||||
- .rules:build:example_test
|
||||
needs:
|
||||
- job: build_examples_cmake_esp32
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: build_examples_cmake_esp32s2
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: build_examples_cmake_esp32c2
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: build_examples_cmake_esp32c3
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: build_examples_cmake_esp32c6
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: build_examples_cmake_esp32h2
|
||||
artifacts: false
|
||||
optional: true
|
||||
- job: build_examples_cmake_esp32s3
|
||||
artifacts: false
|
||||
optional: true
|
||||
variables:
|
||||
TEST_TYPE: example_test
|
||||
TEST_DIR: examples
|
||||
|
||||
assign_unit_test:
|
||||
extends:
|
||||
- .assign_test_template
|
||||
- .rules:build:unit_test
|
||||
needs:
|
||||
- job: build_esp_idf_tests_cmake_esp32
|
||||
optional: true
|
||||
- job: build_esp_idf_tests_cmake_esp32s2
|
||||
optional: true
|
||||
- job: build_esp_idf_tests_cmake_esp32c2
|
||||
optional: true
|
||||
- job: build_esp_idf_tests_cmake_esp32c3
|
||||
optional: true
|
||||
- job: build_esp_idf_tests_cmake_esp32c6
|
||||
optional: true
|
||||
- job: build_esp_idf_tests_cmake_esp32h2
|
||||
optional: true
|
||||
- job: build_esp_idf_tests_cmake_esp32s3
|
||||
optional: true
|
||||
variables:
|
||||
TEST_TYPE: unit_test
|
||||
TEST_DIR: components/idf_test/unit_test
|
||||
|
||||
assign_integration_test:
|
||||
extends:
|
||||
- .assign_test_template
|
||||
- .rules:test:integration_test
|
||||
- .before_script_minimal
|
||||
image: ${CI_INTEGRATION_TEST_ENV_IMAGE}
|
||||
needs:
|
||||
- build_ssc_esp32
|
||||
- build_ssc_esp32c3
|
||||
- build_ssc_esp32c2
|
||||
artifacts:
|
||||
paths:
|
||||
- $TEST_DIR/test_configs
|
||||
expire_in: 1 week
|
||||
variables:
|
||||
TEST_DIR: ${CI_PROJECT_DIR}/tools/ci/integration_test
|
||||
BUILD_DIR: ${CI_PROJECT_DIR}/SSC/ssc_bin
|
||||
INTEGRATION_TEST_CASE_PATH: "${CI_PROJECT_DIR}/auto_test_script/TestCaseFiles"
|
||||
ASSIGN_TEST_CASE_SCRIPT: "${CI_PROJECT_DIR}/auto_test_script/bin/CIAssignTestCases.py"
|
||||
PYTHONPATH: ${CI_PROJECT_DIR}/auto_test_script/packages
|
||||
GIT_LFS_SKIP_SMUDGE: 1
|
||||
script:
|
||||
- add_gitlab_ssh_keys
|
||||
# clone test script to assign tests
|
||||
- retry_failed git clone ${CI_AUTO_TEST_SCRIPT_REPO_URL} auto_test_script
|
||||
- python $CHECKOUT_REF_SCRIPT auto_test_script auto_test_script
|
||||
- cd auto_test_script
|
||||
- ./tools/ci/setup_idfci.sh
|
||||
# assign integration test cases
|
||||
- python ${ASSIGN_TEST_CASE_SCRIPT} -t ${INTEGRATION_TEST_CASE_PATH} -c $CI_TARGET_TEST_CONFIG_FILE -b ${BUILD_DIR} -o $TEST_DIR/test_configs
|
||||
@@ -1,7 +1,5 @@
|
||||
.build_template:
|
||||
stage: build
|
||||
extends:
|
||||
- .after_script:build:ccache
|
||||
image: $ESP_ENV_IMAGE
|
||||
tags:
|
||||
- build
|
||||
@@ -10,45 +8,76 @@
|
||||
variables:
|
||||
# Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
|
||||
IDF_CCACHE_ENABLE: "1"
|
||||
after_script:
|
||||
# Show ccache statistics if enabled globally
|
||||
- test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
|
||||
dependencies: []
|
||||
|
||||
.build_cmake_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .before_script:build
|
||||
- .after_script:build:ccache
|
||||
- .before_script_build_jobs
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
- pipeline_variables
|
||||
artifacts:
|
||||
paths:
|
||||
# The other artifacts patterns are defined under tools/ci/artifacts_handler.py
|
||||
# Now we're uploading/downloading the binary files from our internal storage server
|
||||
#
|
||||
# keep the log file to help debug
|
||||
- "**/build*/build_log.txt"
|
||||
# keep the size info to help track the binary size
|
||||
- size_info.txt
|
||||
- "**/build*/size.json"
|
||||
- "**/build*/build_log.txt"
|
||||
- "**/build*/*.bin"
|
||||
# upload to s3 server to save the artifacts size
|
||||
# - "**/build*/*.map"
|
||||
# ttfw tests require elf files
|
||||
- "**/build*/*.elf"
|
||||
- "**/build*/flasher_args.json"
|
||||
- "**/build*/flash_project_args"
|
||||
- "**/build*/config/sdkconfig.json"
|
||||
# ttfw tests require sdkconfig file
|
||||
- "**/build*/sdkconfig"
|
||||
- "**/build*/bootloader/*.bin"
|
||||
- "**/build*/partition_table/*.bin"
|
||||
- list_job_*.json
|
||||
- size_info.txt
|
||||
# unit test specific
|
||||
- components/idf_test/unit_test/*.yml
|
||||
when: always
|
||||
expire_in: 4 days
|
||||
after_script:
|
||||
# Show ccache statistics if enabled globally
|
||||
- test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
|
||||
# upload the binary files to s3 server
|
||||
- echo -e "\e[0Ksection_start:`date +%s`:upload_binaries_to_s3_server[collapsed=true]\r\e[0KUploading binaries to s3 Server"
|
||||
- shopt -s globstar
|
||||
# use || true to bypass the no-file error
|
||||
- zip ${CI_JOB_ID}.zip **/build*/*.bin || true
|
||||
- zip ${CI_JOB_ID}.zip **/build*/*.elf || true
|
||||
- zip ${CI_JOB_ID}.zip **/build*/*.map || true
|
||||
- zip ${CI_JOB_ID}.zip **/build*/flasher_args.json || true
|
||||
- zip ${CI_JOB_ID}.zip **/build*/flash_project_args || true
|
||||
- zip ${CI_JOB_ID}.zip **/build*/config/sdkconfig.json || true
|
||||
- zip ${CI_JOB_ID}.zip **/build*/sdkconfig || true
|
||||
- zip ${CI_JOB_ID}.zip **/build*/bootloader/*.bin || true
|
||||
- zip ${CI_JOB_ID}.zip **/build*/partition_table/*.bin || true
|
||||
- mc alias set shiny-s3 ${SHINY_S3_SERVER} ${SHINY_S3_ACCESS_KEY} ${SHINY_S3_SECRET_KEY}
|
||||
- mc cp ${CI_JOB_ID}.zip shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip
|
||||
- echo -e "\e[0Ksection_end:`date +%s`:upload_binaries_to_s3_server\r\e[0K"
|
||||
- echo "Please download the full binary files (including *.elf and *.map files) from the following share link"
|
||||
# would be clean up after 4 days
|
||||
- mc share download shiny-s3/idf-artifacts/${CI_PIPELINE_ID}/${CI_JOB_ID}.zip --expire=96h
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
# CI specific options start from "--collect-size-info xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
--copy-sdkconfig
|
||||
--collect-size-info size_info.txt
|
||||
--collect-app-info list_job_${CI_NODE_INDEX:-1}.json
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--extra-preserve-dirs
|
||||
examples/bluetooth/esp_ble_mesh/ble_mesh_console
|
||||
examples/bluetooth/hci/controller_hci_uart_esp32
|
||||
examples/wifi/iperf
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
# for detailed documents, please refer to .gitlab/ci/README.md#uploaddownload-artifacts-to-internal-minio-server
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_cmake_clang_template:
|
||||
extends:
|
||||
@@ -58,64 +87,88 @@
|
||||
TEST_BUILD_OPTS_EXTRA: ""
|
||||
TEST_DIR: tools/test_apps/system/cxx_pthread_bluetooth
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
# CI specific options start from "--collect-size-info xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
--copy-sdkconfig
|
||||
--collect-size-info size_info.txt
|
||||
--collect-app-info list_job_${CI_NODE_INDEX:-1}.json
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
$TEST_BUILD_OPTS_EXTRA
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_pytest_template:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .before_script_build_jobs
|
||||
artifacts:
|
||||
paths:
|
||||
- "**/build*/size.json"
|
||||
- "**/build*/build_log.txt"
|
||||
- "**/build*/*.bin"
|
||||
# upload to s3 server to save the artifacts size
|
||||
# - "**/build*/*.map"
|
||||
# - "**/build*/*.elf"
|
||||
- "**/build*/flasher_args.json"
|
||||
- "**/build*/flash_project_args"
|
||||
- "**/build*/config/sdkconfig.json"
|
||||
- "**/build*/bootloader/*.bin"
|
||||
- "**/build*/partition_table/*.bin"
|
||||
- list_job_*.json
|
||||
- size_info.txt
|
||||
when: always
|
||||
expire_in: 4 days
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
# CI specific options start from "--collect-size-info xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
-m \"not host_test\"
|
||||
--pytest-apps
|
||||
--collect-size-info size_info.txt
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_pytest_no_jtag_template:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
extends: .build_pytest_template
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
# CI specific options start from "--collect-size-info xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
-m \"not host_test and not jtag\"
|
||||
--pytest-apps
|
||||
--collect-size-info size_info.txt
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_pytest_jtag_template:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .before_script_build_jobs
|
||||
artifacts:
|
||||
paths:
|
||||
- "**/build*/size.json"
|
||||
- "**/build*/build_log.txt"
|
||||
- "**/build*/*.bin"
|
||||
# upload to s3 server to save the artifacts size
|
||||
# - "**/build*/*.map"
|
||||
- "**/build*/*.elf" # need elf for gdb
|
||||
- "**/build*/flasher_args.json"
|
||||
- "**/build*/flash_project_args"
|
||||
- "**/build*/config/sdkconfig.json"
|
||||
- "**/build*/bootloader/*.bin"
|
||||
- "**/build*/partition_table/*.bin"
|
||||
- list_job_*.json
|
||||
- size_info.txt
|
||||
when: always
|
||||
expire_in: 4 days
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
# CI specific options start from "--collect-size-info xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
|
||||
-t $IDF_TARGET
|
||||
-m \"not host_test and jtag\"
|
||||
--pytest-apps
|
||||
--collect-size-info size_info.txt
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
build_pytest_examples_esp32:
|
||||
extends:
|
||||
@@ -162,9 +215,17 @@ build_pytest_examples_esp32c2:
|
||||
IDF_TARGET: esp32c2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_jtag: # for all targets
|
||||
extends:
|
||||
- .build_pytest_jtag_template
|
||||
- .rules:build:example_test-esp32
|
||||
variables:
|
||||
IDF_TARGET: all
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_esp32c6:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .build_pytest_template
|
||||
- .rules:build:example_test-esp32c6
|
||||
parallel: 2
|
||||
variables:
|
||||
@@ -173,21 +234,12 @@ build_pytest_examples_esp32c6:
|
||||
|
||||
build_pytest_examples_esp32h2:
|
||||
extends:
|
||||
- .build_pytest_no_jtag_template
|
||||
- .build_pytest_template
|
||||
- .rules:build:example_test-esp32h2
|
||||
parallel: 2
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_examples_jtag: # for all targets
|
||||
extends:
|
||||
- .build_pytest_jtag_template
|
||||
- .rules:build:example_test
|
||||
variables:
|
||||
IDF_TARGET: all
|
||||
TEST_DIR: examples
|
||||
|
||||
build_pytest_components_esp32:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
@@ -258,18 +310,40 @@ build_only_components_apps:
|
||||
parallel: 5
|
||||
script:
|
||||
- set_component_ut_vars
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
# CI specific options start from "--collect-size-info xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py $COMPONENT_UT_DIRS -v
|
||||
-t all
|
||||
--collect-size-info size_info.txt
|
||||
--collect-app-info list_job_${CI_NODE_INDEX:-1}.json
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_pytest_test_apps_template:
|
||||
extends: .build_pytest_template
|
||||
artifacts:
|
||||
paths:
|
||||
- "**/build*/size.json"
|
||||
- "**/build*/build_log.txt"
|
||||
- "**/build*/*.bin"
|
||||
# upload to s3 server to save the artifacts size
|
||||
# - "**/build*/*.map"
|
||||
# pytest test apps requires elf files for coredump tests
|
||||
- "**/build*/*.elf"
|
||||
- "**/build*/flasher_args.json"
|
||||
- "**/build*/flash_project_args"
|
||||
- "**/build*/config/sdkconfig.json"
|
||||
- "**/build*/bootloader/*.elf"
|
||||
- "**/build*/bootloader/*.bin"
|
||||
- "**/build*/partition_table/*.bin"
|
||||
- "**/build*/project_description.json"
|
||||
- list_job_*.json
|
||||
- size_info.txt
|
||||
when: always
|
||||
expire_in: 4 days
|
||||
|
||||
build_pytest_test_apps_esp32:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .build_pytest_test_apps_template
|
||||
- .rules:build:custom_test-esp32
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
@@ -277,7 +351,7 @@ build_pytest_test_apps_esp32:
|
||||
|
||||
build_pytest_test_apps_esp32s2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .build_pytest_test_apps_template
|
||||
- .rules:build:custom_test-esp32s2
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
@@ -285,7 +359,7 @@ build_pytest_test_apps_esp32s2:
|
||||
|
||||
build_pytest_test_apps_esp32s3:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .build_pytest_test_apps_template
|
||||
- .rules:build:custom_test-esp32s3
|
||||
parallel: 2
|
||||
variables:
|
||||
@@ -294,7 +368,7 @@ build_pytest_test_apps_esp32s3:
|
||||
|
||||
build_pytest_test_apps_esp32c3:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .build_pytest_test_apps_template
|
||||
- .rules:build:custom_test-esp32c3
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
@@ -302,7 +376,7 @@ build_pytest_test_apps_esp32c3:
|
||||
|
||||
build_pytest_test_apps_esp32c2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .build_pytest_test_apps_template
|
||||
- .rules:build:custom_test-esp32c2
|
||||
variables:
|
||||
IDF_TARGET: esp32c2
|
||||
@@ -310,7 +384,7 @@ build_pytest_test_apps_esp32c2:
|
||||
|
||||
build_pytest_test_apps_esp32c6:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .build_pytest_test_apps_template
|
||||
- .rules:build:custom_test-esp32c6
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
@@ -318,7 +392,7 @@ build_pytest_test_apps_esp32c6:
|
||||
|
||||
build_pytest_test_apps_esp32h2:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .build_pytest_test_apps_template
|
||||
- .rules:build:custom_test-esp32h2
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
@@ -330,23 +404,22 @@ build_only_tools_test_apps:
|
||||
- .rules:build:custom_test
|
||||
parallel: 9
|
||||
script:
|
||||
# CI specific options start from "--parallel-count xxx". could ignore when running locally
|
||||
# CI specific options start from "--collect-size-info xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py tools/test_apps -v
|
||||
-t all
|
||||
--collect-size-info size_info.txt
|
||||
--collect-app-info list_job_${CI_NODE_INDEX:-1}.json
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- python tools/ci/artifacts_handler.py upload
|
||||
|
||||
.build_template_app_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .before_script:build
|
||||
- .before_script_build_jobs
|
||||
variables:
|
||||
LOG_PATH: "${CI_PROJECT_DIR}/log_template_app"
|
||||
BUILD_PATH: "${CI_PROJECT_DIR}/build_template_app"
|
||||
BUILD_DIR: "${BUILD_PATH}/@t/@w"
|
||||
BUILD_DIR: "@t/@w"
|
||||
BUILD_LOG_CMAKE: "${LOG_PATH}/cmake_@t_@w.txt"
|
||||
BUILD_COMMAND_ARGS: ""
|
||||
artifacts:
|
||||
@@ -378,6 +451,151 @@ fast_template_app:
|
||||
BUILD_COMMAND_ARGS: "-p"
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
.build_ssc_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build:integration_test
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
artifacts:
|
||||
paths:
|
||||
- SSC/ssc_bin
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- retry_failed git clone $SSC_REPOSITORY
|
||||
- python $CHECKOUT_REF_SCRIPT SSC SSC
|
||||
- cd SSC
|
||||
- MAKEFLAGS= ./ci_build_ssc.sh $TARGET_NAME
|
||||
|
||||
build_ssc_esp32:
|
||||
extends: .build_ssc_template
|
||||
parallel: 3
|
||||
variables:
|
||||
TARGET_NAME: "ESP32"
|
||||
|
||||
build_ssc_esp32s2:
|
||||
extends: .build_ssc_template
|
||||
parallel: 2
|
||||
variables:
|
||||
TARGET_NAME: "ESP32S2"
|
||||
|
||||
build_ssc_esp32c2:
|
||||
extends: .build_ssc_template
|
||||
parallel: 2
|
||||
variables:
|
||||
TARGET_NAME: "ESP32C2"
|
||||
|
||||
build_ssc_esp32c3:
|
||||
extends: .build_ssc_template
|
||||
parallel: 3
|
||||
variables:
|
||||
TARGET_NAME: "ESP32C3"
|
||||
|
||||
build_ssc_esp32s3:
|
||||
extends: .build_ssc_template
|
||||
parallel: 3
|
||||
variables:
|
||||
TARGET_NAME: "ESP32S3"
|
||||
|
||||
build_ssc_esp32c6:
|
||||
extends: .build_ssc_template
|
||||
parallel: 3
|
||||
variables:
|
||||
TARGET_NAME: "ESP32C6"
|
||||
|
||||
build_ssc_esp32h2:
|
||||
extends: .build_ssc_template
|
||||
parallel: 2
|
||||
variables:
|
||||
TARGET_NAME: "ESP32H2"
|
||||
|
||||
.build_esp_idf_tests_cmake_template:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .before_script_build_jobs
|
||||
artifacts:
|
||||
paths:
|
||||
- "**/build*/size.json"
|
||||
- "**/build*/build_log.txt"
|
||||
- "**/build*/*.bin"
|
||||
# upload to s3 server to save the artifacts size
|
||||
# - "**/build*/*.map"
|
||||
# ttfw tests require elf files
|
||||
- "**/build*/*.elf"
|
||||
- "**/build*/flasher_args.json"
|
||||
- "**/build*/flash_project_args"
|
||||
- "**/build*/config/sdkconfig.json"
|
||||
- "**/build*/sdkconfig"
|
||||
- "**/build*/bootloader/*.bin"
|
||||
- "**/build*/partition_table/*.bin"
|
||||
- list_job_*.json
|
||||
- size_info.txt
|
||||
- components/idf_test/unit_test/*.yml
|
||||
when: always
|
||||
expire_in: 4 days
|
||||
script:
|
||||
# CI specific options start from "--collect-size-info xxx". could ignore when running locally
|
||||
- run_cmd python tools/ci/ci_build_apps.py tools/unit-test-app -v
|
||||
-t $IDF_TARGET
|
||||
--config "configs/*="
|
||||
--copy-sdkconfig
|
||||
--preserve-all
|
||||
--collect-size-info size_info.txt
|
||||
--collect-app-info list_job_${CI_NODE_INDEX:-1}.json
|
||||
--parallel-count ${CI_NODE_TOTAL:-1}
|
||||
--parallel-index ${CI_NODE_INDEX:-1}
|
||||
- run_cmd python tools/unit-test-app/tools/UnitTestParser.py tools/unit-test-app ${CI_NODE_INDEX:-1}
|
||||
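As with the pytest templates above, the options from --collect-size-info onward are CI-specific. A rough local sketch of the unit-test-app build shown above, with an illustrative target:

# hedged local sketch; esp32 is an illustrative value for $IDF_TARGET
python tools/ci/ci_build_apps.py tools/unit-test-app -v \
    -t esp32 \
    --config "configs/*=" \
    --copy-sdkconfig \
    --preserve-all
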
|
||||
build_esp_idf_tests_cmake_esp32:
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
|
||||
build_esp_idf_tests_cmake_esp32s2:
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32s2
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
|
||||
build_esp_idf_tests_cmake_esp32s3:
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32s3
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
|
||||
build_esp_idf_tests_cmake_esp32c2:
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32c2
|
||||
variables:
|
||||
IDF_TARGET: esp32c2
|
||||
|
||||
build_esp_idf_tests_cmake_esp32c3:
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32c3
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
|
||||
build_esp_idf_tests_cmake_esp32c6:
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32c6
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
|
||||
build_esp_idf_tests_cmake_esp32h2:
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32h2
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
|
||||
build_examples_cmake_esp32:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
@@ -400,7 +618,7 @@ build_examples_cmake_esp32s3:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32s3
|
||||
parallel: 11
|
||||
parallel: 7
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
TEST_DIR: examples
|
||||
@@ -409,7 +627,7 @@ build_examples_cmake_esp32c2:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32c2
|
||||
parallel: 7
|
||||
parallel: 6
|
||||
variables:
|
||||
IDF_TARGET: esp32c2
|
||||
TEST_DIR: examples
|
||||
@@ -418,7 +636,7 @@ build_examples_cmake_esp32c3:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32c3
|
||||
parallel: 9
|
||||
parallel: 6
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
TEST_DIR: examples
|
||||
@@ -427,7 +645,7 @@ build_examples_cmake_esp32c6:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32c6
|
||||
parallel: 11
|
||||
parallel: 6
|
||||
variables:
|
||||
IDF_TARGET: esp32c6
|
||||
TEST_DIR: examples
|
||||
@@ -436,20 +654,11 @@ build_examples_cmake_esp32h2:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32h2
|
||||
parallel: 9
|
||||
parallel: 2
|
||||
variables:
|
||||
IDF_TARGET: esp32h2
|
||||
TEST_DIR: examples
|
||||
|
||||
build_examples_cmake_esp32p4:
|
||||
extends:
|
||||
- .build_cmake_template
|
||||
- .rules:build:example_test-esp32p4
|
||||
parallel: 4
|
||||
variables:
|
||||
IDF_TARGET: esp32p4
|
||||
TEST_DIR: examples
|
||||
|
||||
build_clang_test_apps_esp32:
|
||||
extends:
|
||||
- .build_cmake_clang_template
|
||||
@@ -508,39 +717,39 @@ build_clang_test_apps_esp32c6:
|
||||
stage: host_test
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build:check
|
||||
- .rules:build
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
script:
|
||||
- ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
|
||||
- cd ${IDF_PATH}/tools/test_build_system
|
||||
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
|
||||
- pytest --parallel-count ${CI_NODE_TOTAL:-1} --parallel-index ${CI_NODE_INDEX:-1}
|
||||
--work-dir ${CI_PROJECT_DIR}/test_build_system --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml
|
||||
--ignore-result-files known_failure_cases/known_failure_cases.txt
|
||||
- rm -rf test_build_system
|
||||
- mkdir test_build_system
|
||||
- cd test_build_system
|
||||
- ${IDF_PATH}/tools/ci/${SHELL_TEST_SCRIPT}
|
||||
|
||||
pytest_build_system:
|
||||
test_build_system_cmake:
|
||||
extends: .test_build_system_template
|
||||
parallel: 3
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- test_build_system
|
||||
when: always
|
||||
expire_in: 2 days
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
variables:
|
||||
SHELL_TEST_SCRIPT: test_build_system_cmake.sh
|
||||
|
||||
pytest_build_system_macos:
|
||||
test_build_system_cmake_macos:
|
||||
extends:
|
||||
- .test_build_system_template
|
||||
- .before_script:build:macos
|
||||
- .before_script_macos
|
||||
- .rules:build:macos
|
||||
tags:
|
||||
- macos_shell
|
||||
parallel: 3
|
||||
variables:
|
||||
SHELL_TEST_SCRIPT: test_build_system_cmake.sh
|
||||
|
||||
test_build_system_spaces:
|
||||
extends: .test_build_system_template
|
||||
variables:
|
||||
SHELL_TEST_SCRIPT: test_build_system_spaces.py
|
||||
|
||||
pytest_build_system:
|
||||
extends: .test_build_system_template
|
||||
artifacts:
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
@@ -549,10 +758,14 @@ pytest_build_system_macos:
|
||||
expire_in: 2 days
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
script:
|
||||
- ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
|
||||
- cd ${IDF_PATH}/tools/test_build_system
|
||||
- pytest --work-dir ${CI_PROJECT_DIR}/test_build_system --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml
|
||||
|
||||
build_docker:
|
||||
extends:
|
||||
- .before_script:minimal
|
||||
- .before_script_minimal
|
||||
- .rules:build:docker
|
||||
stage: host_test
|
||||
needs: []
|
||||
|
||||
@@ -1,348 +0,0 @@
|
||||
#####################
# Default Variables #
#####################
stages:
- upload_cache
- pre_check
- build
- assign_test
- target_test
- host_test
- build_doc
- test_deploy
- deploy
- post_deploy

variables:
|
||||
# System environment
|
||||
|
||||
# Common parameters for the 'make' during CI tests
|
||||
MAKEFLAGS: "-j5 --no-keep-going"
|
||||
|
||||
# GitLab-CI environment
|
||||
|
||||
# now we have pack-objects cache, so clone strategy is faster than fetch
|
||||
GIT_STRATEGY: clone
|
||||
# we will download archive for each submodule instead of clone.
|
||||
# we don't do "recursive" when fetching submodules as they're not used in CI now.
|
||||
GIT_SUBMODULE_STRATEGY: none
|
||||
# since we're using merged-result pipelines, the last commit should work for most cases
|
||||
GIT_DEPTH: 1
|
||||
# --no-recurse-submodules: we use cache for submodules
|
||||
# --prune --prune-tags: in case remote branch or tag is force pushed
|
||||
GIT_FETCH_EXTRA_FLAGS: "--no-recurse-submodules --prune --prune-tags"
|
||||
# we're using .cache folder for caches
|
||||
GIT_CLEAN_FLAGS: -ffdx -e .cache/
|
||||
LATEST_GIT_TAG: v5.2-dev
|
||||
|
||||
SUBMODULE_FETCH_TOOL: "tools/ci/ci_fetch_submodule.py"
|
||||
# by default we will fetch all submodules
|
||||
# jobs can overwrite this variable to only fetch submodules they required
|
||||
# set to "none" if don't need to fetch submodules
|
||||
SUBMODULES_TO_FETCH: "all"
|
||||
# tell the build system not to check for submodule updates, as we download archives instead of cloning
|
||||
IDF_SKIP_CHECK_SUBMODULES: 1
|
||||
|
||||
IDF_PATH: "$CI_PROJECT_DIR"
|
||||
V: "0"
|
||||
CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"
|
||||
PYTHON_VER: 3.8.17
|
||||
|
||||
# Docker images
|
||||
ESP_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-env-v5.2:2"
|
||||
ESP_IDF_DOC_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-doc-env-v5.2:2-1"
|
||||
QEMU_IMAGE: "${CI_DOCKER_REGISTRY}/qemu-v5.2:2-20230522"
|
||||
TARGET_TEST_ENV_IMAGE: "${CI_DOCKER_REGISTRY}/target-test-env-v5.2:2"
|
||||
|
||||
SONARQUBE_SCANNER_IMAGE: "${CI_DOCKER_REGISTRY}/sonarqube-scanner:5"
|
||||
PRE_COMMIT_IMAGE: "${CI_DOCKER_REGISTRY}/esp-idf-pre-commit:1"
|
||||
|
||||
# target test repo parameters
|
||||
TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
|
||||
|
||||
# cache python dependencies
|
||||
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
|
||||
|
||||
# Set this variable to the branch of idf-constraints repo in order to test a custom Python constraint file. The
|
||||
# branch name must be without the remote part ("origin/"). Keep the variable empty in order to use the constraint
|
||||
# file from https://dl.espressif.com/dl/esp-idf.
|
||||
CI_PYTHON_CONSTRAINT_BRANCH: ""
|
||||
|
||||
# Update the filename for a specific ESP-IDF release. It is used only with CI_PYTHON_CONSTRAINT_BRANCH.
|
||||
CI_PYTHON_CONSTRAINT_FILE: "espidf.constraints.v5.2.txt"
|
||||
|
||||
# Set this variable to repository name of a Python tool you wish to install and test in the context of ESP-IDF CI.
|
||||
# Keep the variable empty when not used.
|
||||
CI_PYTHON_TOOL_REPO: ""
|
||||
|
||||
# Set this variable to the branch of a Python tool repo specified in CI_PYTHON_TOOL_REPO. The
|
||||
# branch name must be without the remote part ("origin/"). Keep the variable empty when not used.
|
||||
# This is used only if CI_PYTHON_TOOL_REPO is not empty.
|
||||
CI_PYTHON_TOOL_BRANCH: ""
|
||||
|
||||
IDF_CI_BUILD: 1
|
||||
|
||||
################################################
|
||||
# `before_script` and `after_script` Templates #
|
||||
################################################
|
||||
.common_before_scripts: &common-before_scripts |
|
||||
source tools/ci/utils.sh
|
||||
is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
|
||||
|
||||
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
|
||||
export IDF_MIRROR_PREFIX_MAP=
|
||||
fi
|
||||
|
||||
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)include_nightly_run(,|$)"; then
|
||||
export INCLUDE_NIGHTLY_RUN="1"
|
||||
export NIGHTLY_RUN="1"
|
||||
fi
|
||||
|
||||
# configure cmake related flags
|
||||
source tools/ci/configure_ci_environment.sh
|
||||
|
||||
# add extra python packages
|
||||
export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/esp_app_trace:$IDF_PATH/components/partition_table:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
|
||||
|
||||
.setup_tools_and_idf_python_venv: &setup_tools_and_idf_python_venv |
|
||||
# must use after setup_tools_except_target_test
|
||||
# otherwise the export.sh won't work properly
|
||||
|
||||
# download constraint file for dev
|
||||
if [[ -n "$CI_PYTHON_CONSTRAINT_BRANCH" ]]; then
|
||||
wget -O /tmp/constraint.txt --header="Authorization:Bearer ${ESPCI_TOKEN}" ${GITLAB_HTTP_SERVER}/api/v4/projects/2581/repository/files/${CI_PYTHON_CONSTRAINT_FILE}/raw?ref=${CI_PYTHON_CONSTRAINT_BRANCH}
|
||||
mkdir -p ~/.espressif
|
||||
mv /tmp/constraint.txt ~/.espressif/${CI_PYTHON_CONSTRAINT_FILE}
|
||||
fi
|
||||
|
||||
# Mirror
|
||||
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
|
||||
export IDF_MIRROR_PREFIX_MAP=
|
||||
fi
|
||||
|
||||
# install latest python packages
|
||||
# target test jobs
|
||||
if [[ "${CI_JOB_STAGE}" == "target_test" ]]; then
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
elif [[ "${CI_JOB_STAGE}" == "build_doc" ]]; then
|
||||
run_cmd bash install.sh --enable-ci --enable-docs
|
||||
elif [[ "${CI_JOB_STAGE}" == "build" ]]; then
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
else
|
||||
if ! echo "${CI_JOB_NAME}" | egrep ".*pytest.*"; then
|
||||
run_cmd bash install.sh --enable-ci
|
||||
else
|
||||
run_cmd bash install.sh --enable-ci --enable-pytest
|
||||
fi
|
||||
fi
|
||||
|
||||
# Install esp-clang if necessary
|
||||
if [[ "$IDF_TOOLCHAIN" == "clang" ]]; then
|
||||
$IDF_PATH/tools/idf_tools.py --non-interactive install esp-clang
|
||||
fi
|
||||
|
||||
# Since version 3.21, CMake passes source files and include dirs to ninja using absolute paths.
|
||||
# Needed for pytest junit reports.
|
||||
$IDF_PATH/tools/idf_tools.py --non-interactive install cmake
|
||||
|
||||
source ./export.sh
|
||||
|
||||
# Custom clang
|
||||
if [[ ! -z "$CI_CLANG_DISTRO_URL" ]]; then
|
||||
echo "Using custom clang from ${CI_CLANG_DISTRO_URL}"
|
||||
wget $CI_CLANG_DISTRO_URL
|
||||
ARCH_NAME=$(basename $CI_CLANG_DISTRO_URL)
|
||||
tar -x -f $ARCH_NAME
|
||||
export PATH=$PWD/esp-clang/bin:$PATH
|
||||
fi
|
||||
|
||||
# Custom OpenOCD
|
||||
if [[ ! -z "$OOCD_DISTRO_URL" && "$CI_JOB_STAGE" == "target_test" ]]; then
|
||||
echo "Using custom OpenOCD from ${OOCD_DISTRO_URL}"
|
||||
wget $OOCD_DISTRO_URL
|
||||
ARCH_NAME=$(basename $OOCD_DISTRO_URL)
|
||||
tar -x -f $ARCH_NAME
|
||||
export OPENOCD_SCRIPTS=$PWD/openocd-esp32/share/openocd/scripts
|
||||
export PATH=$PWD/openocd-esp32/bin:$PATH
|
||||
fi
|
||||
|
||||
if [[ -n "$CI_PYTHON_TOOL_REPO" ]]; then
|
||||
git clone --quiet --depth=1 -b ${CI_PYTHON_TOOL_BRANCH} https://gitlab-ci-token:${ESPCI_TOKEN}@${GITLAB_HTTPS_HOST}/espressif/${CI_PYTHON_TOOL_REPO}.git
|
||||
pip install ./${CI_PYTHON_TOOL_REPO}
|
||||
rm -rf ${CI_PYTHON_TOOL_REPO}
|
||||
fi
|
||||
|
||||
.show_ccache_statistics: &show_ccache_statistics |
|
||||
# Show ccache statistics if enabled globally
|
||||
test "$CI_CCACHE_STATS" == 1 && test -n "$(which ccache)" && ccache --show-stats || true
|
||||
|
||||
.upload_failed_job_log_artifacts: &upload_failed_job_log_artifacts |
|
||||
if [ $CI_JOB_STATUS = "failed" ]; then
|
||||
python tools/ci/artifacts_handler.py upload --type logs
|
||||
fi
|
||||
|
||||
.before_script:minimal:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
|
||||
.before_script:build:macos:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
# On macOS, these tools need to be installed
|
||||
- export IDF_TOOLS_PATH="${HOME}/.espressif_runner_${CI_RUNNER_ID}_${CI_CONCURRENT_ID}"
|
||||
- $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
|
||||
# This adds tools (compilers) and the version-specific Python environment to PATH
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- fetch_submodules
|
||||
|
||||
.before_script:build:
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
- fetch_submodules
|
||||
- export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
|
||||
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
|
||||
|
||||
.after_script:build:ccache:
|
||||
after_script:
|
||||
- *show_ccache_statistics
|
||||
- *upload_failed_job_log_artifacts
|
||||
|
||||
##############################
|
||||
# Git Strategy Job Templates #
|
||||
##############################
|
||||
.git_init: &git_init |
|
||||
mkdir -p "${CI_PROJECT_DIR}"
|
||||
cd "${CI_PROJECT_DIR}"
|
||||
git init
|
||||
|
||||
.git_fetch_from_mirror_url_if_exists: &git_fetch_from_mirror_url_if_exists |
|
||||
# check if set mirror
|
||||
if [ -n "${LOCAL_GITLAB_HTTPS_HOST:-}" ] && [ -n "${ESPCI_TOKEN:-}" ]; then
|
||||
MIRROR_REPO_URL="https://bot:${ESPCI_TOKEN}@${LOCAL_GITLAB_HTTPS_HOST}/${CI_PROJECT_PATH}"
|
||||
elif [ -n "${LOCAL_GIT_MIRROR:-}" ]; then
|
||||
MIRROR_REPO_URL="${LOCAL_GIT_MIRROR}/${CI_PROJECT_PATH}"
|
||||
fi
|
||||
|
||||
# fetch from mirror first if set
|
||||
if [ -n "${MIRROR_REPO_URL:-}" ]; then
|
||||
if git remote -v | grep origin; then
|
||||
git remote set-url origin "${MIRROR_REPO_URL}"
|
||||
else
|
||||
git remote add origin "${MIRROR_REPO_URL}"
|
||||
fi
|
||||
# mirror url may fail with authentication issue
|
||||
git fetch origin --no-recurse-submodules || true
|
||||
fi
|
||||
|
||||
# set remote url to CI_REPOSITORY_URL
|
||||
if git remote -v | grep origin; then
|
||||
git remote set-url origin "${CI_REPOSITORY_URL}"
|
||||
else
|
||||
git remote add origin "${CI_REPOSITORY_URL}"
|
||||
fi
|
||||
|
||||
.git_checkout_fetch_head: &git_checkout_fetch_head |
|
||||
git checkout FETCH_HEAD
|
||||
git clean ${GIT_CLEAN_FLAGS}
|
||||
|
||||
# git diff requires two commits, which come from different CI env vars depending on the pipeline type
#
# By default, we use git strategy "clone" with depth 1 to speed up the clone process.
# But for jobs that require running `git diff`, we need to fetch more commits to get the correct diffs.
#
# Since there's no way to get the correct git_depth before the job starts,
# we can't set `GIT_DEPTH` in the job definition.
#
# Set git strategy to "none" and fetch manually instead.
.before_script:fetch:git_diff:
variables:
GIT_STRATEGY: none
before_script:
- *git_init
- *git_fetch_from_mirror_url_if_exists
|
||||
- |
|
||||
# merged results pipelines, by default
|
||||
if [[ -n $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA ]]; then
|
||||
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git fetch origin $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_MERGE_REQUEST_SOURCE_BRANCH_SHA)
|
||||
# merge request pipelines, when the mr got conflicts
|
||||
elif [[ -n $CI_MERGE_REQUEST_DIFF_BASE_SHA ]]; then
|
||||
git fetch origin $CI_MERGE_REQUEST_DIFF_BASE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA $CI_COMMIT_SHA)
|
||||
# other pipelines, like the protected branches pipelines
|
||||
elif [[ "$CI_COMMIT_BEFORE_SHA" != "0000000000000000000000000000000000000000" ]]; then
|
||||
git fetch origin $CI_COMMIT_BEFORE_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
git fetch origin $CI_COMMIT_SHA --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_COMMIT_BEFORE_SHA $CI_COMMIT_SHA)
|
||||
else
|
||||
# pipeline source could be web, scheduler, etc.
|
||||
git fetch origin $CI_COMMIT_SHA --depth=2 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
export GIT_DIFF_OUTPUT=$(git diff --name-only $CI_COMMIT_SHA~1 $CI_COMMIT_SHA)
|
||||
fi
|
||||
- *git_checkout_fetch_head
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
|
||||
# git describe requires commit history until the latest tag
|
||||
.before_script:fetch:git_describe:
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
before_script:
|
||||
- *git_init
|
||||
- *git_fetch_from_mirror_url_if_exists
|
||||
- |
|
||||
git fetch origin refs/tags/"${LATEST_GIT_TAG}":refs/tags/"${LATEST_GIT_TAG}" --depth=1
|
||||
git repack -d
|
||||
git fetch origin $CI_COMMIT_SHA --shallow-since=$(git log -1 --format=%as "${LATEST_GIT_TAG}")
|
||||
- *git_checkout_fetch_head
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
|
||||
# target test runners may locate in different places
|
||||
# for runners with a git mirror set, we fetch from the mirror first, then fetch the HEAD commit
|
||||
.before_script:fetch:target_test:
|
||||
variables:
|
||||
GIT_STRATEGY: none
|
||||
before_script:
|
||||
- *git_init
|
||||
- *git_fetch_from_mirror_url_if_exists
|
||||
- git fetch origin "${CI_COMMIT_SHA}" --depth=1 ${GIT_FETCH_EXTRA_FLAGS}
|
||||
- *git_checkout_fetch_head
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
# no submodules
|
||||
|
||||
#############
|
||||
# `default` #
|
||||
#############
|
||||
default:
|
||||
cache:
|
||||
# pull only for most of the use cases since it's cache dir.
|
||||
# Only set "push" policy for "upload_cache" stage jobs
|
||||
- key: pip-cache
|
||||
paths:
|
||||
- .cache/pip
|
||||
policy: pull
|
||||
- key: submodule-cache
|
||||
paths:
|
||||
- .cache/submodule_archives
|
||||
policy: pull
|
||||
before_script:
|
||||
- *common-before_scripts
|
||||
- *setup_tools_and_idf_python_venv
|
||||
- add_gitlab_ssh_keys
|
||||
- fetch_submodules
|
||||
retry:
|
||||
max: 2
|
||||
when:
|
||||
# In case of a runner failure we could hop to another one, or a network error could go away.
|
||||
- runner_system_failure
|
||||
# Job execution timeout may be caused by a network issue.
|
||||
- job_execution_timeout
|
||||
@@ -7,10 +7,12 @@
|
||||
#
|
||||
# This file should ONLY be used during bringup. Should be reset to empty after the bringup process
|
||||
extra_default_build_targets:
|
||||
- esp32p4
|
||||
- esp32c6
|
||||
- esp32h2
|
||||
|
||||
bypass_check_test_targets:
|
||||
- esp32p4
|
||||
# bypass_check_test_targets:
|
||||
# - esp32h2
|
||||
# - esp32c6
|
||||
#
|
||||
# These lines would
|
||||
# - enable the README.md check for esp32c6. Don't forget to add the build jobs in .gitlab/ci/build.yml
|
||||
|
||||
@@ -6,11 +6,11 @@
|
||||
- esp32c2
|
||||
- esp32c6
|
||||
- esp32h2
|
||||
- esp32p4
|
||||
|
||||
.target_test: &target_test
|
||||
- example_test
|
||||
- custom_test
|
||||
- unit_test
|
||||
- component_ut
|
||||
|
||||
##############
|
||||
@@ -25,7 +25,6 @@
|
||||
- downloadable-tools
|
||||
included_in:
|
||||
- build:target_test
|
||||
- build:check
|
||||
|
||||
# -------------------
|
||||
# Specific Build Jobs
|
||||
@@ -40,6 +39,14 @@
|
||||
- build_system
|
||||
- downloadable-tools
|
||||
|
||||
"build:windows":
|
||||
labels:
|
||||
- build
|
||||
- windows
|
||||
patterns:
|
||||
- build_system
|
||||
- windows
|
||||
|
||||
"build:macos":
|
||||
labels:
|
||||
- build
|
||||
@@ -47,24 +54,9 @@
|
||||
- macos_test # for backward compatibility
|
||||
patterns:
|
||||
- build_system
|
||||
- build_macos
|
||||
- macos
|
||||
- downloadable-tools
|
||||
|
||||
# ---------------------------
|
||||
# Add patterns to build rules
|
||||
# ---------------------------
|
||||
"patterns:template-app":
|
||||
patterns:
|
||||
- build_template-app
|
||||
included_in:
|
||||
- build:target_test
|
||||
|
||||
"patterns:build-check":
|
||||
patterns:
|
||||
- build_check
|
||||
included_in:
|
||||
- build:check
|
||||
|
||||
# ---------------
|
||||
# Build Test Jobs
|
||||
# ---------------
|
||||
@@ -83,6 +75,16 @@
|
||||
- "build:{0}"
|
||||
- build:target_test
|
||||
|
||||
build:integration_test:
|
||||
labels:
|
||||
- build
|
||||
patterns:
|
||||
- build_components
|
||||
- build_system
|
||||
included_in:
|
||||
- build:target_test
|
||||
|
||||
|
||||
####################
|
||||
# Target Test Jobs #
|
||||
####################
|
||||
@@ -105,6 +107,22 @@
|
||||
# -------------
|
||||
# Special Cases
|
||||
# -------------
|
||||
"test:component_ut-{0}": # component_ut will trigger by unit_test as well, since now we have 2 kinds of UT
|
||||
matrix:
|
||||
- *all_targets
|
||||
labels:
|
||||
- component_ut
|
||||
- "component_ut_{0}"
|
||||
- unit_test
|
||||
- "unit_test_{0}"
|
||||
- target_test
|
||||
patterns:
|
||||
- component_ut
|
||||
- "build-component_ut-{0}"
|
||||
included_in:
|
||||
- build:component_ut
|
||||
- "build:component_ut-{0}"
|
||||
- build:target_test
|
||||
|
||||
# To reduce the specific runners' usage.
|
||||
# Do not create these jobs by default patterns on development branches
|
||||
@@ -113,7 +131,8 @@
|
||||
matrix:
|
||||
- *target_test
|
||||
- *all_targets
|
||||
- - wifi # pytest*wifi*
|
||||
- - bt # example_test_005
|
||||
- wifi # pytest*wifi*
|
||||
- ethernet # pytest*ethernet*
|
||||
- sdio # pytest*sdio*
|
||||
- usb # USB Device & Host tests
|
||||
@@ -121,7 +140,7 @@
|
||||
- i154
|
||||
- flash_multi
|
||||
- ecdsa
|
||||
- nvs_encr_hmac
|
||||
- ccs811 # pytest*ccs811*
|
||||
patterns:
|
||||
- "{0}-{1}-{2}"
|
||||
- "{0}-{2}"
|
||||
@@ -163,6 +182,23 @@
|
||||
- "build:example_test"
|
||||
- build:target_test
|
||||
|
||||
"test:integration_test_{0}":
|
||||
matrix:
|
||||
- - wifi
|
||||
- ble
|
||||
labels:
|
||||
- integration_test_{0}
|
||||
- integration_test
|
||||
- target_test
|
||||
patterns:
|
||||
- integration_test-{0}
|
||||
- target_test-{0}
|
||||
# - maybe others
|
||||
included_in:
|
||||
- test:integration_test
|
||||
- build:integration_test
|
||||
- build:target_test
|
||||
|
||||
"test:host_test":
|
||||
labels:
|
||||
- host_test
|
||||
@@ -181,3 +217,13 @@
|
||||
"labels:nvs_coverage": # host_test
|
||||
labels:
|
||||
- nvs_coverage
|
||||
|
||||
"labels-protected:lan8720": # UT # FIXME: IDFCI-1176 temporary run this on master/release or with label
|
||||
labels:
|
||||
- lan8720
|
||||
included_in:
|
||||
- build:unit_test
|
||||
- build:unit_test-esp32
|
||||
- build:target_test
|
||||
- build:component_ut
|
||||
- build:component_ut-esp32
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
.deploy_job_template:
|
||||
stage: deploy
|
||||
image: $ESP_ENV_IMAGE
|
||||
tags: [ deploy ]
|
||||
tags:
|
||||
- deploy
|
||||
|
||||
# Check this before push_to_github
|
||||
check_submodule_sync:
|
||||
@@ -9,11 +10,11 @@ check_submodule_sync:
|
||||
- .deploy_job_template
|
||||
- .rules:test:submodule
|
||||
stage: test_deploy
|
||||
tags: [ brew, github_sync ]
|
||||
tags:
|
||||
- github_sync
|
||||
retry: 2
|
||||
variables:
|
||||
# for brew runners, we always set GIT_STRATEGY to fetch
|
||||
GIT_STRATEGY: fetch
|
||||
GIT_STRATEGY: clone
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git"
|
||||
dependencies: []
|
||||
@@ -30,32 +31,51 @@ check_submodule_sync:
|
||||
push_to_github:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .before_script:minimal
|
||||
- .rules:push_to_github
|
||||
needs:
|
||||
- check_submodule_sync
|
||||
tags: [ brew, github_sync ]
|
||||
variables:
|
||||
# for brew runners, we always set GIT_STRATEGY to fetch
|
||||
GIT_STRATEGY: fetch
|
||||
# github also need full record of commits
|
||||
GIT_DEPTH: 0
|
||||
- .before_script_minimal
|
||||
- .rules:protected-no_label
|
||||
dependencies: []
|
||||
script:
|
||||
- add_github_ssh_keys
|
||||
- git remote remove github &>/dev/null || true
|
||||
- git remote add github git@github.com:espressif/esp-idf.git
|
||||
- tools/ci/push_to_github.sh
|
||||
|
||||
deploy_update_SHA_in_esp-dockerfiles:
|
||||
deploy_test_result:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .before_script:minimal
|
||||
- .rules:protected-no_label-always
|
||||
- .before_script_minimal
|
||||
- .rules:ref:master-always
|
||||
image: $CI_DOCKER_REGISTRY/bot-env:1
|
||||
dependencies: []
|
||||
tags:
|
||||
- deploy_test
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- ${CI_PROJECT_DIR}/test-management/*.log
|
||||
expire_in: 1 week
|
||||
variables:
|
||||
GIT_DEPTH: 2
|
||||
tags: [ shiny, build ]
|
||||
BOT_ACCOUNT_CONFIG_FILE: "${CI_PROJECT_DIR}/test-management/Config/Account.local.yml"
|
||||
TEST_RESULTS_PATH: "${CI_PROJECT_DIR}/TEST_RESULTS"
|
||||
script:
|
||||
- 'curl --header "PRIVATE-TOKEN: ${ESPCI_SCRIPTS_TOKEN}" -o create_MR_in_esp_dockerfile.sh $GITLAB_HTTP_SERVER/api/v4/projects/1260/repository/files/create_MR_in_esp_dockerfile%2Fcreate_MR_in_esp_dockerfile.sh/raw\?ref\=master'
|
||||
- chmod +x create_MR_in_esp_dockerfile.sh
|
||||
- ./create_MR_in_esp_dockerfile.sh
|
||||
- add_gitlab_ssh_keys
|
||||
- export GIT_SHA=$(echo ${CI_COMMIT_SHA} | cut -c 1-8)
|
||||
- export REV_COUNT=$(git rev-list --count ${GIT_SHA} --)
|
||||
- export SUMMARY="IDF CI test result for $GIT_SHA (r${REV_COUNT})"
|
||||
# Download test result
|
||||
- export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
|
||||
- python3 ${IDF_PATH}/tools/ci/get_all_test_results.py --path ${TEST_RESULTS_PATH} --include_retried
|
||||
- if [[ -z $(find ${TEST_RESULTS_PATH} -name "*.xml") ]]; then exit 0; fi
|
||||
# Clone test-management repo
|
||||
- retry_failed git clone $TEST_MANAGEMENT_REPO
|
||||
- python3 $CHECKOUT_REF_SCRIPT test-management test-management
|
||||
- cd test-management
|
||||
- echo $BOT_JIRA_ACCOUNT > ${BOT_ACCOUNT_CONFIG_FILE}
|
||||
# Make sure all requirements are installed
|
||||
- pip3 install -r requirements.txt
|
||||
# Update test cases
|
||||
- python3 ImportTestCase.py $JIRA_TEST_MANAGEMENT_PROJECT from_xml -d ${TEST_RESULTS_PATH} -r $GIT_SHA -l IDFCI
|
||||
# update test results
|
||||
- python3 ImportTestResult.py -r "$GIT_SHA (r${REV_COUNT})" -j $JIRA_TEST_MANAGEMENT_PROJECT -s "$SUMMARY" -l IDFCI -p ${TEST_RESULTS_PATH} --pipeline_url ${CI_PIPELINE_URL}
|
||||
# May need a long time to upload all test results.
|
||||
timeout: 4 hours
|
||||
|
||||
@@ -25,9 +25,6 @@
|
||||
.if-protected-no_label: &if-protected-no_label
|
||||
if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/) && $BOT_TRIGGER_WITH_LABEL == null'
|
||||
|
||||
.if-qa-test-tag: &if-qa-test-tag
|
||||
if: '$CI_COMMIT_TAG =~ /^qa-test/'
|
||||
|
||||
.if-label-build_docs: &if-label-build_docs
|
||||
if: '$BOT_LABEL_BUILD_DOCS || $CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*build_docs(?:,[^,\n\r]+)*$/i'
|
||||
|
||||
@@ -35,31 +32,18 @@
|
||||
if: '$BOT_LABEL_DOCS_FULL || $CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*docs_full(?:,[^,\n\r]+)*$/i'
|
||||
|
||||
.if-dev-push: &if-dev-push
|
||||
if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_COMMIT_TAG !~ /^qa-test/ && ($CI_PIPELINE_SOURCE == "push" || $CI_PIPELINE_SOURCE == "merge_request_event")'
|
||||
|
||||
.if-schedule: &if-schedule
|
||||
if: '$CI_PIPELINE_SOURCE == "schedule"'
|
||||
if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && ($CI_PIPELINE_SOURCE == "push" || $CI_PIPELINE_SOURCE == "merge_request_event")'
|
||||
|
||||
.doc-rules:build:docs-full:
|
||||
rules:
|
||||
- <<: *if-qa-test-tag
|
||||
when: never
|
||||
- <<: *if-schedule
|
||||
- <<: *if-protected
|
||||
- <<: *if-label-build_docs
|
||||
- <<: *if-label-docs_full
|
||||
- <<: *if-dev-push
|
||||
changes: *patterns-docs-full
|
||||
|
||||
.doc-rules:build:docs-full-prod:
|
||||
rules:
|
||||
- <<: *if-qa-test-tag
|
||||
when: never
|
||||
- <<: *if-protected-no_label
|
||||
|
||||
.doc-rules:build:docs-partial:
|
||||
rules:
|
||||
- <<: *if-qa-test-tag
|
||||
when: never
|
||||
- <<: *if-dev-push
|
||||
changes: *patterns-docs-full
|
||||
when: never
|
||||
@@ -92,13 +76,14 @@ check_docs_lang_sync:
|
||||
stage: build_doc
|
||||
tags:
|
||||
- build_docs
|
||||
dependencies: []
|
||||
script:
|
||||
- cd docs
|
||||
- build-docs -t $DOCTGT -bs $DOC_BUILDERS -l $DOCLANG build
|
||||
parallel:
|
||||
matrix:
|
||||
- DOCLANG: ["en", "zh_CN"]
|
||||
DOCTGT: ["esp32", "esp32s2", "esp32s3", "esp32c3", "esp32c2", "esp32c6", "esp32h2", "esp32p4"]
|
||||
DOCTGT: ["esp32", "esp32s2", "esp32s3", "esp32c3", "esp32c2", "esp32c6", "esp32h2"]
|
||||
|
||||
check_docs_gh_links:
|
||||
image: $ESP_IDF_DOC_ENV_IMAGE
|
||||
@@ -115,24 +100,6 @@ build_docs_html_full:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/*.txt
|
||||
- docs/_build/*/*/html/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "html"
|
||||
|
||||
build_docs_html_full_prod:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full-prod
|
||||
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
@@ -146,10 +113,6 @@ build_docs_html_partial:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-partial
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
@@ -163,29 +126,12 @@ build_docs_html_partial:
|
||||
- DOCLANG: "en"
|
||||
DOCTGT: "esp32"
|
||||
- DOCLANG: "zh_CN"
|
||||
DOCTGT: "esp32p4"
|
||||
DOCTGT: "esp32c6"
|
||||
|
||||
build_docs_pdf:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/latex/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "latex"
|
||||
|
||||
build_docs_pdf_prod:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full-prod
|
||||
dependencies: [] # Stop build_docs jobs from downloading all previous job's artifacts
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
@@ -238,12 +184,13 @@ deploy_docs_production:
|
||||
# The DOCS_PROD_* variables used by this job are "Protected" so these branches must all be marked "Protected" in Gitlab settings
|
||||
extends:
|
||||
- .deploy_docs_template
|
||||
- .doc-rules:build:docs-full-prod
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
stage: post_deploy
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs: # ensure runs after push_to_github succeeded
|
||||
- build_docs_html_full_prod
|
||||
- build_docs_pdf_prod
|
||||
- build_docs_html_full
|
||||
- build_docs_pdf
|
||||
- job: push_to_github
|
||||
artifacts: false
|
||||
variables:
|
||||
@@ -258,7 +205,8 @@ deploy_docs_production:
|
||||
check_doc_links:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .doc-rules:build:docs-full-prod
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
stage: post_deploy
|
||||
needs:
|
||||
- job: deploy_docs_production
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
image: $ESP_ENV_IMAGE
|
||||
tags:
|
||||
- host_test
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
dependencies: []
|
||||
# run host_test jobs immediately, only after upload cache
|
||||
needs:
|
||||
- job: upload-pip-cache
|
||||
@@ -13,7 +13,6 @@
|
||||
- job: upload-submodules-cache
|
||||
optional: true
|
||||
artifacts: false
|
||||
- pipeline_variables
|
||||
|
||||
test_nvs_on_host:
|
||||
extends: .host_test_template
|
||||
@@ -41,6 +40,22 @@ test_partition_table_on_host:
|
||||
- cd components/partition_table/test_gen_esp32part_host
|
||||
- ./gen_esp32part_tests.py
|
||||
|
||||
test_wl_on_host:
|
||||
extends: .host_test_template
|
||||
artifacts:
|
||||
paths:
|
||||
- components/wear_levelling/test_wl_host/coverage_report.zip
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- cd components/wear_levelling/test_wl_host
|
||||
- make test
|
||||
|
||||
test_fatfs_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd components/fatfs/test_fatfs_host/
|
||||
- make test
|
||||
|
||||
test_ldgen_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
@@ -93,11 +108,6 @@ test_certificate_bundle_on_host:
|
||||
- cd components/mbedtls/esp_crt_bundle/test_gen_crt_bundle/
|
||||
- ./test_gen_crt_bundle.py
|
||||
|
||||
test_gdbstub_on_host:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd components/esp_gdbstub/test_gdbstub_host
|
||||
- make test
|
||||
|
||||
test_idf_py:
|
||||
extends: .host_test_template
|
||||
@@ -113,7 +123,7 @@ test_idf_py:
|
||||
test_idf_tools:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .before_script:minimal
|
||||
- .before_script_minimal
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
@@ -125,10 +135,10 @@ test_idf_tools:
|
||||
entrypoint: [""] # use system python3. no extra pip package installed
|
||||
script:
|
||||
# Tools must be downloaded for testing
|
||||
- python3 ${IDF_PATH}/tools/idf_tools.py download required qemu-riscv32 qemu-xtensa
|
||||
- python3 ${IDF_PATH}/tools/idf_tools.py download
|
||||
- cd ${IDF_PATH}/tools/test_idf_tools
|
||||
- python3 -m pip install jsonschema
|
||||
- python3 ./test_idf_tools.py -v
|
||||
- python3 ./test_idf_tools.py
|
||||
- python3 ./test_idf_tools_python_env.py
|
||||
|
||||
.test_efuse_table_on_host_template:
|
||||
@@ -207,6 +217,12 @@ test_mkdfu:
|
||||
- cd ${IDF_PATH}/tools/test_mkdfu
|
||||
- ./test_mkdfu.py
|
||||
|
||||
test_mkuf2:
|
||||
extends: .host_test_template
|
||||
script:
|
||||
- cd ${IDF_PATH}/tools/test_mkuf2
|
||||
- ./test_mkuf2.py
|
||||
|
||||
test_autocomplete:
|
||||
extends:
|
||||
- .host_test_template
|
||||
@@ -263,8 +279,8 @@ test_sockets_on_host:
|
||||
- grep "Socket unable to connect" test.log
|
||||
# test the udp-client example with lwip sockets
|
||||
- cd ${IDF_PATH}/examples/protocols/sockets/udp_client
|
||||
- echo 'CONFIG_EXAMPLE_IPV4_ADDR="127.0.0.1"' >> sdkconfig.defaults
|
||||
- idf.py --preview set-target linux
|
||||
- cat sdkconfig.ci.linux > sdkconfig
|
||||
- idf.py build
|
||||
- timeout 5 ./build/udp_client.elf >test.log || true
|
||||
- grep "Message sent" test.log
|
||||
@@ -285,7 +301,7 @@ test_gen_soc_caps_kconfig:
|
||||
test_pytest_qemu:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .before_script:build
|
||||
- .before_script_build_jobs
|
||||
image: $QEMU_IMAGE
|
||||
artifacts:
|
||||
when: always
|
||||
@@ -295,37 +311,28 @@ test_pytest_qemu:
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
expire_in: 1 week
|
||||
allow_failure: true # IDFCI-1752
|
||||
parallel:
|
||||
matrix:
|
||||
- IDF_TARGET: [esp32, esp32c3]
|
||||
script:
|
||||
- run_cmd python tools/ci/ci_build_apps.py . -vv
|
||||
--target $IDF_TARGET
|
||||
--target esp32
|
||||
--pytest-apps
|
||||
-m qemu
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
|
||||
- run_cmd pytest
|
||||
--target $IDF_TARGET
|
||||
- pytest
|
||||
--target esp32
|
||||
-m qemu
|
||||
--embedded-services idf,qemu
|
||||
--junitxml=XUNIT_RESULT.xml
|
||||
--ignore-result-files known_failure_cases/known_failure_cases.txt
|
||||
--app-info-filepattern \"list_job_*.txt\"
|
||||
--known-failure-cases-file known_failure_cases/known_failure_cases.txt
|
||||
|
||||
test_pytest_linux:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .before_script:build
|
||||
- .before_script_build_jobs
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
- pytest_embedded_log/
|
||||
- "**/build*/build_log.txt"
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
expire_in: 1 week
|
||||
@@ -334,13 +341,6 @@ test_pytest_linux:
|
||||
--target linux
|
||||
--pytest-apps
|
||||
-m host_test
|
||||
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
|
||||
--modified-components ${MODIFIED_COMPONENTS}
|
||||
--modified-files ${MODIFIED_FILES}
|
||||
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
|
||||
- run_cmd pytest
|
||||
--target linux
|
||||
-m host_test
|
||||
--junitxml=XUNIT_RESULT.xml
|
||||
--ignore-result-files known_failure_cases/known_failure_cases.txt
|
||||
--app-info-filepattern \"list_job_*.txt\"
|
||||
- run_cmd pytest --target linux -m host_test --junitxml=XUNIT_RESULT.xml
|
||||
--known-failure-cases-file known_failure_cases/known_failure_cases.txt
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
# generate dynamic integration pipeline by `idf-integration-ci` project
|
||||
|
||||
.patterns-integration_test: &patterns-integration_test
|
||||
# add all possible patterns to make sure `gen_integration_pipeline` can be triggered.
|
||||
# fine-grained control will be done while generating the pipeline
|
||||
# find `patterns` in `idf-integration-ci` project
|
||||
- "components/**/*"
|
||||
- "tools/**/*"
|
||||
- ".gitlab-ci.yml"
|
||||
- ".gitlab/ci/common.yml"
|
||||
- ".gitlab/ci/integration_test.yml"
|
||||
- ".gitmodules"
|
||||
- "CMakeLists.txt"
|
||||
- "install.sh"
|
||||
- "export.sh"
|
||||
- "Kconfig"
|
||||
- "sdkconfig.rename"
|
||||
|
||||
# Simplify the rules
|
||||
.integration_test_rules:
|
||||
rules:
|
||||
- if: '$CI_PIPELINE_SOURCE != "merge_request_event"'
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
changes: *patterns-integration_test
|
||||
# support trigger by ci labels
|
||||
- if: '$CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*target_test(?:,[^,\n\r]+)*$/i'
|
||||
- if: '$CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*integration_test(?:,[^,\n\r]+)*$/i'
|
||||
- if: '$CI_MERGE_REQUEST_LABELS =~ /^(?:[^,\n\r]+,)*build(?:,[^,\n\r]+)*$/i'
|
||||
|
||||
gen_integration_pipeline:
|
||||
extends:
|
||||
- .before_script:minimal
|
||||
- .integration_test_rules
|
||||
image: ${CI_INTEGRATION_ASSIGN_ENV}
|
||||
stage: assign_test
|
||||
cache: []
|
||||
tags:
|
||||
- assign_test
|
||||
variables:
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
GIT_LFS_SKIP_SMUDGE: 1
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
optional: true
|
||||
artifacts:
|
||||
paths:
|
||||
- idf-integration-ci/child_pipeline/
|
||||
expire_in: 2 weeks
|
||||
script:
|
||||
- add_gitlab_ssh_keys
|
||||
- retry_failed git clone ${CI_GEN_INTEGRATION_PIPELINE_REPO} idf-integration-ci
|
||||
- python $CHECKOUT_REF_SCRIPT idf-integration-ci idf-integration-ci
|
||||
- cd idf-integration-ci
|
||||
- python tools/generate_child_pipeline.py -o child_pipeline/
|
||||
|
||||
child_integration_test_pipeline:
|
||||
extends:
|
||||
- .integration_test_rules
|
||||
stage: assign_test
|
||||
needs:
|
||||
- gen_integration_pipeline
|
||||
trigger:
|
||||
include:
|
||||
- artifact: idf-integration-ci/child_pipeline/pipeline.yml
|
||||
job: gen_integration_pipeline
|
||||
forward:
|
||||
yaml_variables: false
|
||||
strategy: depend
|
||||
@@ -5,41 +5,41 @@
|
||||
- host_test
|
||||
dependencies: []
|
||||
|
||||
check_pre_commit:
|
||||
.check_pre_commit_template:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .before_script:minimal
|
||||
- .before_script_minimal
|
||||
image: $PRE_COMMIT_IMAGE
|
||||
needs:
|
||||
- pipeline_variables
|
||||
|
||||
check_pre_commit_master_release:
|
||||
extends:
|
||||
- .check_pre_commit_template
|
||||
- .rules:protected
|
||||
script:
|
||||
- fetch_submodules
|
||||
- pre-commit run --files $MODIFIED_FILES
|
||||
- git diff-tree --no-commit-id --name-only -r $PIPELINE_COMMIT_SHA | xargs pre-commit run --files
|
||||
|
||||
check_pre_commit_MR:
|
||||
extends:
|
||||
- .check_pre_commit_template
|
||||
- .rules:dev
|
||||
script:
|
||||
- python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py files ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME} | xargs pre-commit run --files
|
||||
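Both pre-commit jobs feed a file list into pre-commit run --files; outside CI the same check can be approximated against whatever a local diff reports. The base ref below is an assumption, not what the CI script uses:

# hedged local sketch; origin/master as the diff base is an assumption
pre-commit run --files $(git diff --name-only origin/master...HEAD)
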
|
||||
check_MR_style_dangerjs:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
image: node:18.15.0-alpine3.16
|
||||
image: node:14.18.0-alpine3.14
|
||||
variables:
|
||||
DANGER_GITLAB_API_TOKEN: ${ESPCI_TOKEN}
|
||||
DANGER_GITLAB_HOST: ${GITLAB_HTTP_SERVER}
|
||||
DANGER_GITLAB_API_BASE_URL: ${GITLAB_HTTP_SERVER}/api/v4
|
||||
DANGER_JIRA_USER: ${DANGER_JIRA_USER}
|
||||
DANGER_JIRA_PASSWORD: ${DANGER_JIRA_PASSWORD}
|
||||
cache:
|
||||
# pull only for most of the use cases since it's cache dir.
|
||||
# Only set "push" policy for "upload_cache" stage jobs
|
||||
key:
|
||||
files:
|
||||
- .gitlab/dangerjs/package-lock.json
|
||||
paths:
|
||||
- .gitlab/dangerjs/node_modules/
|
||||
policy: pull
|
||||
before_script:
|
||||
- cd .gitlab/dangerjs
|
||||
- npm install --no-progress --no-update-notifier # Install danger dependencies
|
||||
- npm install -g danger@11.2.3 --silent --no-progress > /dev/null
|
||||
- npm install axios@1.3.3 --silent --no-progress > /dev/null
|
||||
script:
|
||||
- npx danger ci --failOnErrors -v
|
||||
- danger ci --dangerfile=".gitlab/dangerjs/dangerfile.js" --failOnErrors -v
|
||||
rules:
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
|
||||
@@ -49,22 +49,33 @@ check_version:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:protected
|
||||
- .before_script:fetch:git_describe
|
||||
script:
|
||||
- export IDF_PATH=$PWD
|
||||
- tools/ci/check_idf_version.sh
|
||||
|
||||
check_api_usage:
|
||||
check_rom_api_header:
|
||||
extends: .pre_check_template
|
||||
script:
|
||||
- tools/ci/check_examples_rom_header.sh
|
||||
- tools/ci/check_api_violation.sh
|
||||
- tools/ci/check_examples_extra_component_dirs.sh
|
||||
|
||||
test_check_kconfigs:
|
||||
extends: .pre_check_template
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
- components/*/Kconfig*.new
|
||||
- examples/*/*/*/Kconfig*.new
|
||||
- examples/*/*/*/*/Kconfig*.new
|
||||
- tools/*/Kconfig*.new
|
||||
- tools/*/*/Kconfig*.new
|
||||
- tools/*/*/*/Kconfig*.new
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- python ${IDF_PATH}/tools/ci/test_check_kconfigs.py
|
||||
|
||||
check_blobs:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:build:check
|
||||
extends: .pre_check_template
|
||||
variables:
|
||||
SUBMODULES_TO_FETCH: "components/esp_wifi/lib;components/esp_phy/lib;components/esp_coex/lib"
|
||||
script:
|
||||
@@ -89,7 +100,7 @@ check_blobs:
|
||||
check_public_headers:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:build:check
|
||||
- .rules:build
|
||||
script:
|
||||
- IDF_TARGET=esp32 python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32-elf-
|
||||
- IDF_TARGET=esp32s2 python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32s2-elf-
|
||||
@@ -98,12 +109,11 @@ check_public_headers:
|
||||
- IDF_TARGET=esp32c2 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32c6 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32h2 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
- IDF_TARGET=esp32p4 python tools/ci/check_public_headers.py --jobs 4 --prefix riscv32-esp-elf-
|
||||
|
||||
check_chip_support_components:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:build:check
|
||||
- .rules:build
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
@@ -118,7 +128,7 @@ check_chip_support_components:
|
||||
check_esp_err_to_name:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:build:check
|
||||
- .rules:build
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
@@ -138,12 +148,10 @@ check_esp_system:
|
||||
|
||||
# For release tag pipelines only, make sure the tag was created with 'git tag -a' so it will update
|
||||
# the version returned by 'git describe'
|
||||
# Don't forget to update the env var `LATEST_GIT_TAG` in .gitlab/ci/common.yml
|
||||
check_version_tag:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:tag:release
|
||||
- .before_script:fetch:git_describe
|
||||
script:
|
||||
- (git cat-file -t $CI_COMMIT_REF_NAME | grep tag) || (echo "ESP-IDF versions must be annotated tags." && exit 1)
|
||||
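The check above passes only when git cat-file -t reports the "tag" object type, i.e. the release tag was created as an annotated tag. A short illustration with a hypothetical tag name:

# annotated tag: creates a tag object, so `git cat-file -t` prints "tag"
git tag -a v5.2 -m "ESP-IDF v5.2 release"
git cat-file -t v5.2     # -> tag
# a lightweight tag (git tag v5.2) would point straight at the commit and print "commit"
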
|
||||
@@ -156,7 +164,7 @@ check_artifacts_expire_time:
|
||||
check_test_scripts_build_test_rules:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .before_script:build
|
||||
- .before_script_build_jobs
|
||||
script:
|
||||
# required pytest related packages
|
||||
- run_cmd bash install.sh --enable-pytest
|
||||
@@ -165,29 +173,7 @@ check_test_scripts_build_test_rules:
|
||||
check_configure_ci_environment_parsing:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .before_script:build
|
||||
- .rules:build
|
||||
- .before_script_build_jobs
|
||||
script:
|
||||
- cd tools/ci
|
||||
- python -m unittest ci_build_apps.py
|
||||
|
||||
pipeline_variables:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .before_script:fetch:git_diff
|
||||
tags:
|
||||
- build
|
||||
script:
|
||||
- MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | xargs)
|
||||
- echo "MODIFIED_FILES=$MODIFIED_FILES" >> pipeline.env
|
||||
- echo "MODIFIED_COMPONENTS=$(run_cmd python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | xargs)" >> pipeline.env
|
||||
- |
|
||||
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)BUILD_AND_TEST_ALL_APPS(,|$)"; then
|
||||
echo "BUILD_AND_TEST_ALL_APPS=1" >> pipeline.env
|
||||
fi
|
||||
- cat pipeline.env
|
||||
- python tools/ci/artifacts_handler.py upload --type modified_files_and_components_report
|
||||
artifacts:
|
||||
reports:
|
||||
dotenv: pipeline.env
|
||||
expire_in: 4 days
|
||||
|
||||
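The pipeline.env dotenv produced here is what later supplies ${MODIFIED_FILES} and ${MODIFIED_COMPONENTS} to the --modified-files/--modified-components options used by the build templates earlier in this diff. A hedged local sketch of deriving similar values; the diff base is an assumption, since in CI the list comes from $GIT_DIFF_OUTPUT:

# hedged local sketch, not the CI implementation
MODIFIED_FILES=$(git diff --name-only origin/master...HEAD | xargs)
MODIFIED_COMPONENTS=$(python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | xargs)
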
.gitlab/ci/rules.yml (1252 lines): file diff suppressed because it is too large
@@ -5,37 +5,44 @@ clang_tidy_check:
|
||||
- .rules:patterns:clang_tidy
|
||||
artifacts:
|
||||
paths:
|
||||
- clang_tidy_reports/
|
||||
- $OUTPUT_DIR
|
||||
when: always
|
||||
expire_in: 1 day
|
||||
variables:
|
||||
CLANG_TIDY_DIRS_TXT: ${CI_PROJECT_DIR}/tools/ci/clang_tidy_dirs.txt
|
||||
RULES_FILE: ${CI_PROJECT_DIR}/tools/ci/static-analysis-rules.yml
|
||||
OUTPUT_DIR: ${CI_PROJECT_DIR}/clang_tidy_reports
|
||||
IDF_TOOLCHAIN: clang
|
||||
script:
|
||||
- run_cmd idf_clang_tidy $(cat tools/ci/clang_tidy_dirs.txt | xargs)
|
||||
--output-path clang_tidy_reports
|
||||
--limit-file tools/ci/static-analysis-rules.yml
|
||||
- internal_pip_install $CLANG_TIDY_RUNNER_PROJ pyclang
|
||||
- export PATH=$PATH:$(python -c "import sys; print(sys.executable.rsplit('/', 1)[0])")
|
||||
- dirs=$(cat ${CLANG_TIDY_DIRS_TXT} | while read line; do echo ${CI_PROJECT_DIR}/${line}; done | xargs)
|
||||
- run_cmd idf_clang ${dirs}
|
||||
--output-path ${OUTPUT_DIR}
|
||||
--limit-file ${RULES_FILE}
|
||||
--xtensa-include-dir
|
||||
--run-clang-tidy-py run-clang-tidy
|
||||
|
||||
check_pylint:
|
||||
extends:
|
||||
- .pre_check_template
|
||||
- .rules:patterns:python-files
|
||||
needs:
|
||||
- pipeline_variables
|
||||
- .before_script_minimal
|
||||
image: $SONARQUBE_SCANNER_IMAGE
|
||||
artifacts:
|
||||
when: always
|
||||
reports:
|
||||
codequality: pylint.json
|
||||
paths:
|
||||
- pylint-report.txt
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
|
||||
- |
|
||||
if [ -n "$CI_MERGE_REQUEST_IID" ]; then
|
||||
export files=$(echo "$GIT_DIFF_OUTPUT" | grep ".py$" | xargs);
|
||||
export files=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py files ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME} | grep ".py");
|
||||
else
|
||||
export files=$(git ls-files "*.py" | xargs);
|
||||
export files=$(find . -iname "*.py" -print);
|
||||
fi
|
||||
- if [ -z "$files" ]; then echo "No python files found"; exit 0; fi
|
||||
- run_cmd pylint --exit-zero --load-plugins=pylint_gitlab --output-format=gitlab-codeclimate:pylint.json $files
|
||||
- pylint --rcfile=.pylintrc $files -r n --output-format=parseable > pylint-report.txt || exit 0
|
||||
|
||||
# build stage
|
||||
# Sonarqube related jobs put here for this reason:
|
||||
@@ -76,6 +83,7 @@ check_pylint:
|
||||
expire_in: 1 week
|
||||
dependencies: # Here is not a hard dependency relationship, could be skipped when only python files changed. so we do not use "needs" here.
|
||||
- clang_tidy_check
|
||||
- check_pylint
|
||||
|
||||
code_quality_check:
|
||||
extends:
|
||||
@@ -84,7 +92,7 @@ code_quality_check:
|
||||
allow_failure: true # since now it's using exit code to indicate the code analysis result,
|
||||
# we don't want to block CI when critical issues are found
|
||||
script:
|
||||
- export CI_MERGE_REQUEST_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits --src-branch ${CI_COMMIT_REF_NAME} | tr '\n' ',')
|
||||
- export CI_MERGE_REQUEST_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits ${CI_COMMIT_REF_NAME} | tr '\n' ',')
|
||||
# test if this branch have merge request, if not, exit 0
|
||||
- test -n "$CI_MERGE_REQUEST_IID" || exit 0
|
||||
- test -n "$CI_MERGE_REQUEST_COMMITS" || exit 0
|
||||
@@ -99,6 +107,7 @@ code_quality_check:
|
||||
-Dsonar.gitlab.ref_name=$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
|
||||
-Dsonar.host.url=$SONAR_HOST_URL
|
||||
-Dsonar.login=$SONAR_LOGIN
|
||||
-Dsonar.python.pylint.reportPath=pylint-report.txt
|
||||
|
||||
code_quality_report:
|
||||
extends:
|
||||
@@ -115,3 +124,4 @@ code_quality_report:
|
||||
-Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.host.url=$SONAR_HOST_URL
|
||||
-Dsonar.login=$SONAR_LOGIN
|
||||
-Dsonar.python.pylint.reportPath=pylint-report.txt
|
||||
|
||||
File diff suppressed because it is too large
@@ -9,7 +9,7 @@
|
||||
upload-pip-cache:
|
||||
extends:
|
||||
- .upload_cache_template
|
||||
- .before_script:minimal
|
||||
- .before_script_minimal
|
||||
- .rules:patterns:python-cache
|
||||
tags:
|
||||
- $GEO
|
||||
@@ -29,7 +29,7 @@ upload-pip-cache:
|
||||
upload-submodules-cache:
|
||||
extends:
|
||||
- .upload_cache_template
|
||||
- .before_script:minimal
|
||||
- .before_script_minimal
|
||||
- .rules:patterns:submodule
|
||||
tags:
|
||||
- $GEO
|
||||
@@ -48,27 +48,3 @@ upload-submodules-cache:
|
||||
parallel:
|
||||
matrix:
|
||||
- GEO: [ 'shiny', 'brew' ]
|
||||
|
||||
upload-danger-npm-cache:
|
||||
stage: upload_cache
|
||||
image: node:18.15.0-alpine3.16
|
||||
extends:
|
||||
- .rules:patterns:dangerjs
|
||||
tags:
|
||||
- $GEO
|
||||
- cache
|
||||
cache:
|
||||
key:
|
||||
files:
|
||||
- .gitlab/dangerjs/package-lock.json
|
||||
paths:
|
||||
- .gitlab/dangerjs/node_modules/
|
||||
policy: push
|
||||
before_script:
|
||||
- echo "Skip before scripts ...."
|
||||
script:
|
||||
- cd .gitlab/dangerjs
|
||||
- npm install --no-progress --no-update-notifier
|
||||
parallel:
|
||||
matrix:
|
||||
- GEO: [ 'shiny', 'brew' ]
|
||||
|
||||
@@ -1,172 +0,0 @@
|
||||
const {
|
||||
minimumSummaryChars,
|
||||
maximumSummaryChars,
|
||||
maximumBodyLineChars,
|
||||
allowedTypes,
|
||||
} = require("./mrCommitsConstants.js");
|
||||
const { gptStandardModelTokens } = require("./mrCommitsConstants.js");
|
||||
|
||||
const { ChatPromptTemplate } = require("langchain/prompts");
|
||||
const { SystemMessagePromptTemplate } = require("langchain/prompts");
|
||||
const { LLMChain } = require("langchain/chains");
|
||||
const { ChatOpenAI } = require("langchain/chat_models/openai");
|
||||
const openAiTokenCount = require("openai-gpt-token-counter");
|
||||
|
||||
module.exports = async function () {
|
||||
let outputDangerMessage = `\n\nPerhaps you could use an AI-generated suggestion for your commit message. Here is one `;
|
||||
|
||||
let mrDiff = await getMrGitDiff(danger.git.modified_files);
|
||||
const mrCommitMessages = getCommitMessages(danger.gitlab.commits);
|
||||
const inputPrompt = getInputPrompt();
|
||||
const inputLlmTokens = getInputLlmTokens(
|
||||
inputPrompt,
|
||||
mrDiff,
|
||||
mrCommitMessages
|
||||
);
|
||||
console.log(`Input tokens for LLM: ${inputLlmTokens}`);
|
||||
|
||||
if (inputLlmTokens >= gptStandardModelTokens) {
|
||||
mrDiff = ""; // If the input mrDiff is larger than 16k model, don't use mrDiff, use only current commit messages
|
||||
outputDangerMessage += `(based only on your current commit messages, git-diff of this MR is too big (${inputLlmTokens} tokens) for the AI models):\n\n`;
|
||||
} else {
|
||||
outputDangerMessage += `(based on your MR git-diff and your current commit messages):\n\n`;
|
||||
}
|
||||
|
||||
// Generate AI commit message
|
||||
let generatedCommitMessage = "";
|
||||
try {
|
||||
const rawCommitMessage = await createAiGitMessage(
|
||||
inputPrompt,
|
||||
mrDiff,
|
||||
mrCommitMessages
|
||||
);
|
||||
generatedCommitMessage = postProcessCommitMessage(rawCommitMessage);
|
||||
} catch (error) {
|
||||
console.error("Error in generating AI commit message: ", error);
|
||||
outputDangerMessage +=
|
||||
"\nCould not generate commit message due to an error.\n";
|
||||
}
|
||||
|
||||
// Append closing statements ("Closes https://github.com/espressif/esp-idf/issues/XXX") to the generated commit message
|
||||
let closingStatements = extractClosingStatements(mrCommitMessages);
|
||||
if (closingStatements.length > 0) {
|
||||
generatedCommitMessage += "\n\n" + closingStatements;
|
||||
}
|
||||
|
||||
// Add the generated git message, format to the markdown code block
|
||||
outputDangerMessage += `\n\`\`\`\n${generatedCommitMessage}\n\`\`\`\n`;
|
||||
outputDangerMessage +=
|
||||
"\n**NOTE: AI-generated suggestions may not always be correct, please review the suggestion before using it.**"; // Add disclaimer
|
||||
return outputDangerMessage;
|
||||
};
|
||||
|
||||
async function getMrGitDiff(mrModifiedFiles) {
|
||||
const fileDiffs = await Promise.all(
|
||||
mrModifiedFiles.map((file) => danger.git.diffForFile(file))
|
||||
);
|
||||
return fileDiffs.map((fileDiff) => fileDiff.diff.trim()).join(" ");
|
||||
}
|
||||
|
||||
function getCommitMessages(mrCommits) {
|
||||
return mrCommits.map((commit) => commit.message);
|
||||
}
|
||||
|
||||
function getInputPrompt() {
|
||||
return `You are a helpful assistant that creates suggestions for single git commit message, that user can use to describe all the changes in their merge request.
|
||||
Use git diff: {mrDiff} and users current commit messages: {mrCommitMessages} to get the changes made in the commit.
|
||||
|
||||
Output should be git commit message following the conventional commit format.
|
||||
|
||||
Output only git commit message in desired format, without comments and other text.
|
||||
|
||||
Do not include the closing statements ("Closes https://....") in the output.
|
||||
|
||||
Here are the strict rules you must follow:
|
||||
|
||||
- Avoid mentioning any JIRA tickets (e.g., "Closes JIRA-123").
|
||||
- Be specific. Don't use vague terms (e.g., "some checks", "add new ones", "few changes").
|
||||
- The commit message structure should be: <type><(scope/component)>: <summary>
|
||||
- Types allowed: ${allowedTypes.join(", ")}
|
||||
- If 'scope/component' is used, it must start with a lowercase letter.
|
||||
- The 'summary' must NOT end with a period.
|
||||
- The 'summary' must be between ${minimumSummaryChars} and ${maximumSummaryChars} characters long.
|
||||
|
||||
If a 'body' of commit message is used:
|
||||
|
||||
- Each line must be no longer than ${maximumBodyLineChars} characters.
|
||||
- It must be separated from the 'summary' by a blank line.
|
||||
|
||||
Examples of correct commit messages:
|
||||
|
||||
- With scope and body:
|
||||
fix(freertos): Fix startup timeout issue
|
||||
|
||||
This is a text of commit message body...
|
||||
- adds support for wifi6
|
||||
- adds validations for logging script
|
||||
|
||||
- Without scope and body:
|
||||
ci: added target test job for ESP32-Wifi6`;
|
||||
}
|
||||
|
||||
function getInputLlmTokens(inputPrompt, mrDiff, mrCommitMessages) {
|
||||
const mrCommitMessagesTokens = openAiTokenCount(mrCommitMessages.join(" "));
|
||||
const gitDiffTokens = openAiTokenCount(mrDiff);
|
||||
const promptTokens = openAiTokenCount(inputPrompt);
|
||||
return mrCommitMessagesTokens + gitDiffTokens + promptTokens;
|
||||
}
|
||||
|
||||
async function createAiGitMessage(inputPrompt, mrDiff, mrCommitMessages) {
|
||||
const chat = new ChatOpenAI({ engine: "gpt-3.5-turbo", temperature: 0 });
|
||||
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
|
||||
SystemMessagePromptTemplate.fromTemplate(inputPrompt),
|
||||
]);
|
||||
const chain = new LLMChain({ prompt: chatPrompt, llm: chat });
|
||||
|
||||
const response = await chain.call({
|
||||
mrDiff: mrDiff,
|
||||
mrCommitMessages: mrCommitMessages,
|
||||
});
|
||||
return response.text;
|
||||
}
|
||||
|
||||
function postProcessCommitMessage(rawCommitMessage) {
|
||||
// Split the result into lines
|
||||
let lines = rawCommitMessage.split("\n");
|
||||
|
||||
// Format each line
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let line = lines[i].trim();
|
||||
|
||||
// If the line is longer than maximumBodyLineChars, split it into multiple lines
|
||||
if (line.length > maximumBodyLineChars) {
|
||||
let newLines = [];
|
||||
while (line.length > maximumBodyLineChars) {
|
||||
let lastSpaceIndex = line.lastIndexOf(
|
||||
" ",
|
||||
maximumBodyLineChars
|
||||
);
|
||||
newLines.push(line.substring(0, lastSpaceIndex));
|
||||
line = line.substring(lastSpaceIndex + 1);
|
||||
}
|
||||
newLines.push(line);
|
||||
lines[i] = newLines.join("\n");
|
||||
}
|
||||
}
|
||||
|
||||
// Join the lines back into a single string with a newline between each one
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
function extractClosingStatements(mrCommitMessages) {
|
||||
let closingStatements = [];
|
||||
mrCommitMessages.forEach((message) => {
|
||||
const lines = message.split("\n");
|
||||
lines.forEach((line) => {
|
||||
if (line.startsWith("Closes")) {
|
||||
closingStatements.push(line);
|
||||
}
|
||||
});
|
||||
});
|
||||
return closingStatements.join("\n");
|
||||
}
|
||||
@@ -1,56 +0,0 @@
|
||||
let outputStatuses = [];
|
||||
|
||||
/**
|
||||
* Logs the status of a rule with padded formatting and stores it in the `outputStatuses` array.
|
||||
* If the rule already exists in the array, its status is updated.
|
||||
* @param message The name of the rule
|
||||
* @param status The output (exit) status of the rule
|
||||
*/
|
||||
function recordRuleExitStatus(message, status) {
|
||||
// Check if the rule already exists in the array
|
||||
const existingRecord = outputStatuses.find(
|
||||
(rule) => rule.message === message
|
||||
);
|
||||
|
||||
if (existingRecord) {
|
||||
// Update the status of the existing rule
|
||||
existingRecord.status = status;
|
||||
} else {
|
||||
// If the rule doesn't exist, add it to the array
|
||||
outputStatuses.push({ message, status });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Displays all the rule output statuses stored in the `outputStatuses` array.
|
||||
* Filters out any empty lines, sorts them alphabetically, and prints the statuses
|
||||
* with a header and separator.
|
||||
* These statuses are later displayed in CI job tracelog.
|
||||
*/
|
||||
function displayAllOutputStatuses() {
|
||||
const lineLength = 100;
|
||||
const sortedStatuses = outputStatuses.sort((a, b) =>
|
||||
a.message.localeCompare(b.message)
|
||||
);
|
||||
|
||||
const formattedLines = sortedStatuses.map((statusObj) => {
|
||||
const paddingLength =
|
||||
lineLength - statusObj.message.length - statusObj.status.length;
|
||||
const paddedMessage = statusObj.message.padEnd(
|
||||
statusObj.message.length + paddingLength,
|
||||
"."
|
||||
);
|
||||
return `${paddedMessage} ${statusObj.status}`;
|
||||
});
|
||||
|
||||
console.log(
|
||||
"DangerJS checks (rules) output states:\n" + "=".repeat(lineLength + 2)
|
||||
);
|
||||
console.log(formattedLines.join("\n"));
|
||||
console.log("=".repeat(lineLength + 2));
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
displayAllOutputStatuses,
|
||||
recordRuleExitStatus,
|
||||
};
|
||||
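An illustrative aside, not part of the diff above: a minimal standalone sketch of the padded status lines that displayAllOutputStatuses prints. The rule names and statuses below are invented examples, not output from a real pipeline.

// Illustrative sketch: pad each rule name with dots so the status column lines up,
// the same way displayAllOutputStatuses does. Runnable with plain Node.
const lineLength = 100;
const statuses = [
    { message: "Merge request area labels", status: "Passed" },   // invented example
    { message: "Commit messages style", status: "Failed" },       // invented example
];
const formattedLines = statuses
    .sort((a, b) => a.message.localeCompare(b.message))
    // message.padEnd(lineLength - status.length, ".") is equivalent to the padding arithmetic above
    .map(({ message, status }) => `${message.padEnd(lineLength - status.length, ".")} ${status}`);
console.log("DangerJS checks (rules) output states:\n" + "=".repeat(lineLength + 2));
console.log(formattedLines.join("\n"));
console.log("=".repeat(lineLength + 2));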
@@ -1,23 +1,22 @@
|
||||
const { displayAllOutputStatuses } = require("./configParameters.js");
|
||||
|
||||
/*
|
||||
* Modules with checks are stored in ".gitlab/dangerjs/<module_name>". To import them, use path relative to "dangerfile.js"
|
||||
*/
|
||||
|
||||
async function runChecks() {
|
||||
// Checks for merge request title
|
||||
require("./mrTitleNoDraftOrWip.js")();
|
||||
|
||||
// Checks for merge request description
|
||||
require("./mrDescriptionLongEnough.js")();
|
||||
require("./mrDescriptionReleaseNotes.js")();
|
||||
require("./mrDescriptionHasReleaseNotes.js")();
|
||||
await require("./mrDescriptionJiraLinks.js")();
|
||||
|
||||
// Checks for documentation
|
||||
await require("./mrDocsTranslation.js")();
|
||||
require("./mrDocsTranslation.js")();
|
||||
|
||||
// Checks for MR commits
|
||||
require("./mrCommitsTooManyCommits.js")();
|
||||
await require("./mrCommitsCommitMessage.js")();
|
||||
require("./mrCommitsCommitMessage.js")();
|
||||
require("./mrCommitsEmail.js")();
|
||||
|
||||
// Checks for MR code
|
||||
@@ -26,19 +25,13 @@ async function runChecks() {
|
||||
// Checks for MR area labels
|
||||
await require("./mrAreaLabels.js")();
|
||||
|
||||
// Checks for Source branch name
|
||||
require("./mrSourceBranchName.js")();
|
||||
|
||||
// Show DangerJS individual checks statuses - visible in CI job tracelog
|
||||
displayAllOutputStatuses();
|
||||
|
||||
// Add success log if no issues
|
||||
if (
|
||||
results.fails.length === 0 &&
|
||||
results.warnings.length === 0 &&
|
||||
results.messages.length === 0
|
||||
) {
|
||||
return message("🎉 Good Job! All checks are passing!");
|
||||
return message("Good Job! All checks are passing!");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR has area labels (light blue labels)
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = "Merge request area labels";
|
||||
const projectId = 103; // ESP-IDF
|
||||
const areaLabelColor = /^#d2ebfa$/i; // match color code (case-insensitive)
|
||||
const projectLabels = await danger.gitlab.api.Labels.all(projectId); // Get all project labels
|
||||
@@ -16,12 +13,8 @@ module.exports = async function () {
|
||||
const mrLabels = danger.gitlab.mr.labels; // Get MR labels
|
||||
|
||||
if (!mrLabels.some((label) => areaLabels.includes(label))) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
warn(
|
||||
`Please add some [area labels](${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/labels) to this MR.`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
|
||||
@@ -1,165 +1,70 @@
|
||||
const {
|
||||
minimumSummaryChars,
|
||||
maximumSummaryChars,
|
||||
maximumBodyLineChars,
|
||||
allowedTypes,
|
||||
} = require("./mrCommitsConstants.js");
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check that commit messages are based on the Espressif ESP-IDF project's rules for git commit messages.
|
||||
* Check if commit messages are sufficiently descriptive (not too short).
|
||||
*
|
||||
* Search for commit messages that appear to be automatically generated by Gitlab or temporary messages and report them.
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = "Commit messages style";
|
||||
|
||||
module.exports = function () {
|
||||
const mrCommits = danger.gitlab.commits;
|
||||
const lint = require("@commitlint/lint").default;
|
||||
|
||||
const lintingRules = {
|
||||
// rule definition: [(0-1 = off/on), (always/never = must be/mustn't be), (value)]
|
||||
"body-max-line-length": [1, "always", maximumBodyLineChars], // Max length of the body line
|
||||
"footer-leading-blank": [1, "always"], // Always have a blank line before the footer section
|
||||
"footer-max-line-length": [1, "always", maximumBodyLineChars], // Max length of the footer line
|
||||
"subject-max-length": [1, "always", maximumSummaryChars], // Max length of the "Summary"
|
||||
"subject-min-length": [1, "always", minimumSummaryChars], // Min length of the "Summary"
|
||||
"scope-case": [1, "always", "lower-case"], // "scope/component" must start with lower-case
|
||||
"subject-full-stop": [1, "never", "."], // "Summary" must not end with a full stop (period)
|
||||
"subject-empty": [1, "never"], // "Summary" is mandatory
|
||||
"type-case": [1, "always", "lower-case"], // "type/action" must start with lower-case
|
||||
"type-empty": [1, "never"], // "type/action" is mandatory
|
||||
"type-enum": [1, "always", allowedTypes], // "type/action" must be one of the allowed types
|
||||
"body-leading-blank": [1, "always"], // Always have a blank line before the body section
|
||||
};
|
||||
|
||||
// Switcher for AI suggestions (for poor messages)
|
||||
let generateAISuggestion = false;
|
||||
const detectRegexes = [
|
||||
/^Rebased.*onto.*/i, // Automatically generated message by Gitlab
|
||||
/^Fast-forwarded.*to.*/i, // Automatically generated message by Gitlab
|
||||
/^Applied suggestion to.*/i, // Automatically generated message by Gitlab
|
||||
/^Suggestion applied for line.*/i, // Automatically generated message by Gitlab
|
||||
/^Opened merge request/i, // Automatically generated message by Gitlab
|
||||
/^Merged.*/i, // Automatically generated message by Gitlab
|
||||
/^Opened issue #\d+:.*/i, // Automatically generated message by Gitlab
|
||||
/^Closed issue #\d+:.*/i, // Automatically generated message by Gitlab
|
||||
/^Tagged.*as.*/i, // Automatically generated message by Gitlab
|
||||
/^Deleted tag.*/i, // Automatically generated message by Gitlab
|
||||
/^WIP.*/i, // Message starts with prefix "WIP"
|
||||
/^Cleaned.*/i, // Message starts "Cleaned"
|
||||
/clean ?up/i, // Message contains "clean up"
|
||||
/^[^A-Za-z0-9\s].*/, // Message starts with special characters
|
||||
];
|
||||
|
||||
// Search for the messages in each commit
|
||||
let issuesAllCommitMessages = [];
|
||||
|
||||
let partMessages = [];
|
||||
for (const commit of mrCommits) {
|
||||
const commitMessage = commit.message;
|
||||
const commitMessageTitle = commit.title;
|
||||
|
||||
let issuesSingleCommitMessage = [];
|
||||
let reportSingleCommitMessage = "";
|
||||
|
||||
// Check if the commit message contains any Jira ticket references
|
||||
const jiraTicketRegex = /[A-Z0-9]+-[0-9]+/g;
|
||||
const jiraTicketMatches = commitMessage.match(jiraTicketRegex);
|
||||
if (jiraTicketMatches) {
|
||||
const jiraTicketNames = jiraTicketMatches.join(", ");
|
||||
issuesSingleCommitMessage.push(
|
||||
`- probably contains Jira ticket reference (\`${jiraTicketNames}\`). Please remove Jira tickets from commit messages.`
|
||||
partMessages.push(
|
||||
`- the commit message \`${commitMessageTitle}\` probably contains Jira ticket reference (\`${jiraTicketNames}\`). Please remove Jira tickets from commit messages.`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if the commit message matches any regex from "detectRegexes"
|
||||
if (detectRegexes.some((regex) => commitMessage.match(regex))) {
|
||||
partMessages.push(
|
||||
`- the commit message \`${commitMessageTitle}\` appears to be a temporary or automatically generated message`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if the commit message is not too short
|
||||
const shortCommitMessageThreshold = 20; // commit message is considered too short below this number of characters
|
||||
if (commitMessage.length < shortCommitMessageThreshold) {
|
||||
partMessages.push(
|
||||
`- the commit message \`${commitMessageTitle}\` may not be sufficiently descriptive`
|
||||
);
|
||||
}
|
||||
|
||||
// Lint commit messages with @commitlint (Conventional Commits style)
|
||||
const result = await lint(commit.message, lintingRules);
|
||||
|
||||
for (const warning of result.warnings) {
|
||||
// Custom messages for each rule with terminology used by Espressif conventional commits guide
|
||||
switch (warning.name) {
|
||||
case "subject-max-length":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *summary* appears to be too long`
|
||||
);
|
||||
break;
|
||||
case "type-empty":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *type/action* looks empty`
|
||||
);
|
||||
break;
|
||||
case "type-case":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *type/action* should start with a lowercase letter`
|
||||
);
|
||||
|
||||
break;
|
||||
case "scope-empty":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *scope/component* looks empty`
|
||||
);
|
||||
break;
|
||||
case "scope-case":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *scope/component* should be lowercase without whitespace, allowed special characters are \`_\` \`/\` \`.\` \`,\` \`*\` \`-\` \`.\``
|
||||
);
|
||||
break;
|
||||
case "subject-empty":
|
||||
issuesSingleCommitMessage.push(`- *summary* looks empty`);
|
||||
generateAISuggestion = true;
|
||||
break;
|
||||
case "subject-min-length":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *summary* looks too short`
|
||||
);
|
||||
generateAISuggestion = true;
|
||||
break;
|
||||
case "subject-case":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *summary* should start with a capital letter`
|
||||
);
|
||||
break;
|
||||
case "subject-full-stop":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *summary* should not end with a period (full stop)`
|
||||
);
|
||||
break;
|
||||
case "type-enum":
|
||||
issuesSingleCommitMessage.push(
|
||||
`- *type/action* should be one of [${allowedTypes
|
||||
.map((type) => `\`${type}\``)
|
||||
.join(", ")}]`
|
||||
);
|
||||
break;
|
||||
|
||||
default:
|
||||
issuesSingleCommitMessage.push(`- ${warning.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (issuesSingleCommitMessage.length) {
|
||||
reportSingleCommitMessage = `- the commit message \`"${commitMessageTitle}"\`:\n${issuesSingleCommitMessage
|
||||
.map((message) => ` ${message}`) // Indent each issue by 2 spaces
|
||||
.join("\n")}`;
|
||||
issuesAllCommitMessages.push(reportSingleCommitMessage);
|
||||
}
|
||||
}
|
||||
|
||||
// Create report
|
||||
if (issuesAllCommitMessages.length) {
|
||||
issuesAllCommitMessages.sort();
|
||||
const basicTips = [
|
||||
`- correct format of commit message should be: \`<type/action>(<scope/component>): <summary>\`, for example \`fix(esp32): Fixed startup timeout issue\``,
|
||||
`- allowed types are: \`${allowedTypes}\``,
|
||||
`- sufficiently descriptive message summary should be between ${minimumSummaryChars} to ${maximumSummaryChars} characters and start with upper case letter`,
|
||||
`- avoid Jira references in commit messages (unavailable/irrelevant for our customers)`,
|
||||
`- follow this [commit messages guide](${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/wikis/dev-proc/Commit-messages)`,
|
||||
];
|
||||
let dangerMessage = `\n**Some issues found for the commit messages in this MR:**\n${issuesAllCommitMessages.join(
|
||||
"\n"
|
||||
)}
|
||||
\n***
|
||||
\n**Please consider updating these commit messages** - here are some basic tips:\n${basicTips.join(
|
||||
"\n"
|
||||
)}
|
||||
\n \`TIP:\` You can install commit-msg pre-commit hook (\`pre-commit install -t pre-commit -t commit-msg\`) to run this check when committing.
|
||||
\n***
|
||||
`;
|
||||
|
||||
if (generateAISuggestion) {
|
||||
// Create AI generated suggestion for git commit message based of gitDiff and current commit messages
|
||||
const AImessageSuggestion =
|
||||
await require("./aiGenerateGitMessage.js")();
|
||||
dangerMessage += AImessageSuggestion;
|
||||
}
|
||||
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(dangerMessage);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
// Create report
|
||||
if (partMessages.length) {
|
||||
partMessages.sort();
|
||||
let dangerMessage = `\nSome issues found for the commit messages in this MR:\n${partMessages.join('\n')}
|
||||
\nPlease consider updating these commit messages. It is recommended to follow this [commit messages guide](https://gitlab.espressif.cn:6688/espressif/esp-idf/-/wikis/dev-proc/Commit-messages)`;
|
||||
warn(dangerMessage);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
module.exports = {
|
||||
gptStandardModelTokens: 4096,
|
||||
minimumSummaryChars: 20,
|
||||
maximumSummaryChars: 72,
|
||||
maximumBodyLineChars: 100,
|
||||
allowedTypes: [
|
||||
"change",
|
||||
"ci",
|
||||
"docs",
|
||||
"feat",
|
||||
"fix",
|
||||
"refactor",
|
||||
"remove",
|
||||
"revert",
|
||||
],
|
||||
};
|
||||
@@ -1,23 +1,16 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if the author is accidentally making a commit using a personal email
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = 'Commits from outside Espressif';
|
||||
const mrCommitAuthorEmails = danger.gitlab.commits.map(commit => commit.author_email);
|
||||
const mrCommitCommitterEmails = danger.gitlab.commits.map(commit => commit.committer_email);
|
||||
const emailPattern = /.*@espressif\.com/;
|
||||
const filteredEmails = [...mrCommitAuthorEmails, ...mrCommitCommitterEmails].filter((email) => !emailPattern.test(email));
|
||||
if (filteredEmails.length) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return message(
|
||||
`Some of the commits were authored or committed by developers outside Espressif: ${filteredEmails.join(', ')}. Please check if this is expected.`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
};
|
||||
|
||||
@@ -1,22 +1,15 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR has not an excessive numbers of commits (if squashed)
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = 'Number of commits in merge request';
|
||||
const tooManyCommitThreshold = 2; // above this number of commits, squash commits is suggested
|
||||
const mrCommits = danger.gitlab.commits;
|
||||
|
||||
if (mrCommits.length > tooManyCommitThreshold) {
|
||||
recordRuleExitStatus(ruleName, "Passed (with suggestions)");
|
||||
return message(
|
||||
`You might consider squashing your ${mrCommits.length} commits (simplifying branch history).`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
};
|
||||
|
||||
.gitlab/dangerjs/mrDescriptionHasReleaseNotes.js: new file, 28 lines
@@ -0,0 +1,28 @@
|
||||
/**
|
||||
* Check if MR Description contains mandatory section "Release notes"
|
||||
*
|
||||
* Extracts the content of the "Release notes" section from the GitLab merge request description.
|
||||
*
|
||||
* @dangerjs WARN (if section missing, is empty or wrong markdown format)
|
||||
*/
|
||||
module.exports = function () {
|
||||
const mrDescription = danger.gitlab.mr.description;
|
||||
const regexSectionReleaseNotes = /## Release notes([\s\S]*?)(?=## |$)/;
|
||||
const sectionReleaseNotes = mrDescription.match(regexSectionReleaseNotes);
|
||||
|
||||
if (!sectionReleaseNotes) {
|
||||
warn(
|
||||
'The `Release Notes` section seems to be missing. Please check if the section header in MR description is present and in the correct markdown format ("## Release Notes")\n'
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
let content = sectionReleaseNotes[1].replace(/(\r\n|\n|\r)/gm, "").trim(); // Remove empty lines and whitespace
|
||||
|
||||
if (!content.length) {
|
||||
warn(
|
||||
"The `Release Notes` section seems to be empty (no section content)\n"
|
||||
);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
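An illustrative aside, not part of the diff above: the section-extraction regex in the new mrDescriptionHasReleaseNotes.js can be tried in isolation. The sample MR description below is invented.

// Illustrative sketch: extract the "## Release notes" section the same way the check above does.
const regexSectionReleaseNotes = /## Release notes([\s\S]*?)(?=## |$)/;

const sampleDescription = [                            // invented MR description
    "Fixes the startup timeout handling.",
    "",
    "## Release notes",
    "- [esp32] Fixed startup timeout issue",
    "",
    "## Related",
    "- Closes https://github.com/espressif/esp-idf/issues/XXX",
].join("\n");

const sectionReleaseNotes = sampleDescription.match(regexSectionReleaseNotes);
if (!sectionReleaseNotes) {
    console.log("Release notes section missing");
} else {
    const content = sectionReleaseNotes[1].replace(/(\r\n|\n|\r)/gm, "").trim();
    console.log(content.length ? `Release notes found: ${content}` : "Release notes section empty");
}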
@@ -1,5 +1,3 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/** Check that there are valid JIRA links in MR description.
|
||||
*
|
||||
* This check extracts the "Related" section from the MR description and
|
||||
@@ -12,7 +10,6 @@ const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
*
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = 'Jira ticket references';
|
||||
const axios = require("axios");
|
||||
const mrDescription = danger.gitlab.mr.description;
|
||||
const mrCommitMessages = danger.gitlab.commits.map(
|
||||
@@ -29,7 +26,6 @@ module.exports = async function () {
|
||||
!sectionRelated.header || // No section Related in MR description or ...
|
||||
!jiraTicketRegex.test(sectionRelated.content) // no Jira links in section Related
|
||||
) {
|
||||
recordRuleExitStatus(ruleName, 'Passed (with suggestions)');
|
||||
return message(
|
||||
"Please consider adding references to JIRA issues in the `Related` section of the MR description."
|
||||
);
|
||||
@@ -92,9 +88,6 @@ module.exports = async function () {
|
||||
createReport();
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
|
||||
// ---------------------------------------------------------------
|
||||
|
||||
/**
|
||||
@@ -232,7 +225,6 @@ module.exports = async function () {
|
||||
let dangerMessage = `Some issues found for the related JIRA tickets in this MR:\n${partMessages.join(
|
||||
"\n"
|
||||
)}`;
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(dangerMessage);
|
||||
warn(dangerMessage);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,24 +1,17 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR Description has accurate description".
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = "Merge request sufficient description";
|
||||
const mrDescription = danger.gitlab.mr.description;
|
||||
const descriptionChunk = mrDescription.match(/^([^#]*)/)[1].trim(); // Extract all text before the first section header (i.e., the text before the "## Release notes")
|
||||
|
||||
const shortMrDescriptionThreshold = 50; // Description is considered too short below this number of characters
|
||||
|
||||
if (descriptionChunk.length < shortMrDescriptionThreshold) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
"The MR description looks very brief, please check if more details can be added."
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
|
||||
@@ -1,103 +0,0 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR Description contains mandatory section "Release notes"
|
||||
*
|
||||
* Extracts the content of the "Release notes" section from the GitLab merge request description.
|
||||
*
|
||||
* @dangerjs WARN (if section missing, is empty or wrong markdown format)
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = 'Merge request Release Notes section';
|
||||
const mrDescription = danger.gitlab.mr.description;
|
||||
const wiki_link = `${process.env.DANGER_GITLAB_HOST}/espressif/esp-idf/-/wikis/rfc/How-to-write-release-notes-properly`;
|
||||
|
||||
const regexSectionReleaseNotes = /## Release notes([\s\S]*?)(?=## |$)/;
|
||||
const regexValidEntry = /^\s*[-*+]\s+.+/;
|
||||
const regexNoReleaseNotes = /no release note/i;
|
||||
|
||||
const sectionReleaseNotes = mrDescription.match(regexSectionReleaseNotes);
|
||||
if (!sectionReleaseNotes) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(`The \`Release Notes\` section seems to be missing. Please check if the section header in MR description is present and in the correct markdown format ("## Release Notes").\n\nSee [Release Notes Format Rules](${wiki_link}).`);
|
||||
}
|
||||
|
||||
const releaseNotesLines = sectionReleaseNotes[1].replace(/<!--[\s\S]*?-->/g, '')
|
||||
|
||||
const lines = releaseNotesLines.split("\n").filter(s => s.trim().length > 0);
|
||||
let valid_entries_found = 0;
|
||||
let no_release_notes_found = false;
|
||||
let violations = [];
|
||||
|
||||
lines.forEach((line) => {
|
||||
if (line.match(regexValidEntry)) {
|
||||
valid_entries_found++;
|
||||
const error_msg = check_entry(line);
|
||||
if (error_msg) {
|
||||
violations.push(error_msg);
|
||||
}
|
||||
} else if (line.match(regexNoReleaseNotes)) {
|
||||
no_release_notes_found = true;
|
||||
}
|
||||
});
|
||||
|
||||
let error_output = [];
|
||||
if (violations.length > 0) {
|
||||
error_output = [...error_output, 'Invalid release note entries:', violations.join('\n')];
|
||||
}
|
||||
if (no_release_notes_found) {
|
||||
if (valid_entries_found > 0) {
|
||||
error_output.push('`No release notes` comment shows up when there is valid entry. Remove bullets before comments in release notes section.');
|
||||
}
|
||||
} else {
|
||||
if (!valid_entries_found) {
|
||||
error_output.push('The `Release Notes` section seems to have no valid entries. Add bullets before valid entries, or add `No release notes` comment to suppress this error if you mean to have no release notes.');
|
||||
}
|
||||
}
|
||||
|
||||
if (error_output.length > 0) {
|
||||
// Paragraphs joined by double `\n`s.
|
||||
error_output = [...error_output, `See [Release Notes Format Guide](${wiki_link}).`].join('\n\n');
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(error_output);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
};
|
||||
|
||||
function check_entry(entry) {
|
||||
const entry_str = `- \`${entry}\``;
|
||||
const indent = " ";
|
||||
|
||||
if (entry.match(/no\s+release\s+note/i)) {
|
||||
return [entry_str, `${indent}- \`No release notes\` comment shouldn't start with bullet.`].join('\n');
|
||||
}
|
||||
|
||||
// Remove a leading escaping backslash of the special characters, https://www.markdownguide.org/basic-syntax/#characters-you-can-escape
|
||||
const escapeCharRegex = /\\([\\`*_{}[\]<>()+#-.!|])/g;
|
||||
entry = entry.replace(escapeCharRegex, '$1');
|
||||
|
||||
const regex = /^(\s*)[-*+]\s+\[([^\]]+)\]\s+(.*)$/;
|
||||
const match = regex.exec(entry);
|
||||
if (!match) {
|
||||
return [entry_str, `${indent}- Please specify the [area] to which the change belongs (see guide). If this line is just a comment, remove the bullet.`].join('\n');
|
||||
}
|
||||
|
||||
// area is in match[2]
|
||||
const description = match[3].trim();
|
||||
let violations = [];
|
||||
|
||||
if (match[1]) {
|
||||
violations.push(`${indent}- Release note entry should start from the beginning of line. (Nested release note not allowed.)`);
|
||||
}
|
||||
|
||||
if (!/^[A-Z0-9]/.test(description)) {
|
||||
violations.push(`${indent}- Release note statement should start with a capital letter or digit.`);
|
||||
}
|
||||
|
||||
if (violations.length > 0) {
|
||||
return [entry_str, ...violations].join('\n');
|
||||
}
|
||||
return null;
|
||||
}
|
||||
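An illustrative aside, not part of the diff above: check_entry expects a top-level bullet with an [area] tag and a description starting with a capital letter or digit. The standalone sketch below classifies a few invented entries with the same regex.

// Illustrative sketch: apply the entry regex used by check_entry to some invented release note entries.
const entryRegex = /^(\s*)[-*+]\s+\[([^\]]+)\]\s+(.*)$/;

const sampleEntries = [                                  // invented examples
    "- [wifi] Added support for a new PHY mode",         // valid
    "- Added support for a new PHY mode",                // missing the [area] tag
    "  - [wifi] Added support for a new PHY mode",       // nested (leading indentation)
    "- [wifi] added support for a new PHY mode",         // description does not start with a capital letter or digit
];

for (const entry of sampleEntries) {
    const match = entryRegex.exec(entry);
    if (!match) {
        console.log(`INVALID (no [area] tag or not a bullet): ${entry}`);
    } else if (match[1]) {
        console.log(`INVALID (nested entry not allowed): ${entry}`);
    } else if (!/^[A-Z0-9]/.test(match[3].trim())) {
        console.log(`INVALID (should start with a capital letter or digit): ${entry}`);
    } else {
        console.log(`OK: ${entry}`);
    }
}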
@@ -1,280 +1,33 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check the documentation files in this MR.
|
||||
* Check if documentation needs translation labels
|
||||
*
|
||||
* Generate an object with all docs/ files found in this MR with paths to their EN/CN versions.
|
||||
*
|
||||
* For common files (both language versions exist in this MR), compare the lines of both files.
|
||||
* Ignore if the CN file is only a single line file with an "include" reference to the EN version.
|
||||
*
|
||||
* For files that only have a CN version in this MR, add a message to the message that an EN file also needs to be created.
|
||||
*
|
||||
* For a file that only has an EN version in this MR, try loading its CN version from the target Gitlab branch.
|
||||
* If its CN version doesn't exist in the repository or it does exist,
|
||||
* but its contents are larger than just an "include" link to the EN version (it's a full-size file),
|
||||
* add a message to the report
|
||||
*
|
||||
* Create a compiled report with the docs/ files issues found and set its severity (WARN/INFO).
|
||||
* Severity is based on the presence of "needs translation: ??" labels in this MR
|
||||
*
|
||||
* @dangerjs WARN (if docs translation issues in the MR)
|
||||
* @dangerjs INFO (if docs translation issues in the MR and the user has already added translation labels).
|
||||
* Adding translation labels "needs translation: XX" automatically notifies the Documentation team
|
||||
*
|
||||
* @dangerjs WARN (if there are no docs issues in MR, but translation labels have been added anyway)
|
||||
* #TODO: this simple logic will be improved in future MRs - Jira IDF-6855.
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = 'Documentation translation';
|
||||
let partMessages = []; // Create a blank field for future records of individual issues
|
||||
const pathProject = "espressif/esp-idf";
|
||||
const regexIncludeLink = /\.\.\sinclude::\s((\.\.\/)+)en\//;
|
||||
const allMrFiles = [
|
||||
...danger.git.modified_files,
|
||||
...danger.git.created_files,
|
||||
...danger.git.deleted_files,
|
||||
];
|
||||
module.exports = function () {
|
||||
const mrLabels = danger.gitlab.mr.labels;
|
||||
const changesInDocsEN = /docs\/en/.test(
|
||||
danger.git.modified_files ? danger.git.modified_files[0] : ""
|
||||
); // Test if changes in directory "docs/EN"
|
||||
const changesInDocsCH = /docs\/zh_CN/.test(
|
||||
danger.git.modified_files ? danger.git.modified_files[0] : ""
|
||||
); // Test if changes in directory "docs/CH"
|
||||
|
||||
const docsFilesMR = parseMrDocsFiles(allMrFiles); // Create single object of all doc files in MR with names, paths and groups
|
||||
|
||||
// Both versions (EN and CN) of document found changed in this MR
|
||||
for (const file of docsFilesMR.bothFilesInMr) {
|
||||
file.contentEn = await getContentFileInMR(file.fileEnPath); // Get content of English file
|
||||
file.linesEn = file.contentEn.split("\n").length; // Get number of lines of English file
|
||||
|
||||
file.contentCn = await getContentFileInMR(file.fileCnPath); // Get content of Chinese file
|
||||
file.linesCn = file.contentCn.split("\n").length; // Get number of lines of English file
|
||||
|
||||
// Compare number of lines in both versions
|
||||
if (file.linesEn !== file.linesCn) {
|
||||
// Check if CN file is only link to EN file
|
||||
if (!regexIncludeLink.test(file.contentCn)) {
|
||||
// if not just a link ...
|
||||
partMessages.push(
|
||||
`- please synchronize the EN and CN version of \`${file.fileName}\`. [\`${file.fileEnPath}\`](${file.fileUrlRepoEN}) has ${file.linesEn} lines; [\`${file.fileCnPath}\`](${file.fileUrlRepoCN}) has ${file.linesCn} lines.`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Only Chinese version of document found changed in this MR
|
||||
for (const file of docsFilesMR.onlyCnFilesInMr) {
|
||||
partMessages.push(
|
||||
`- file \`${file.fileEnPath}\` doesn't exist in this MR or in the GitLab repo. Please add \`${file.fileEnPath}\` into this MR.`
|
||||
);
|
||||
}
|
||||
|
||||
// Only English version of document found in this MR
|
||||
for (const file of docsFilesMR.onlyEnFilesInMr) {
|
||||
const targetBranch = danger.gitlab.mr.target_branch;
|
||||
file.contentCn = await getContentFileInGitlab(
|
||||
file.fileCnPath,
|
||||
targetBranch
|
||||
); // Try to fetch CN file from target branch of Gitlab repository and store content
|
||||
|
||||
if (file.contentCn) {
|
||||
// File found on target branch in Gitlab repository
|
||||
if (!regexIncludeLink.test(file.contentCn)) {
|
||||
// File on Gitlab master is NOT just an ..include:: link to ENG version
|
||||
file.fileUrlRepoMasterCN = `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${targetBranch}/${file.fileCnPath}`;
|
||||
partMessages.push(
|
||||
`- file \`${file.fileCnPath}\` was not updated in this MR, but found unchanged full document (not just link to EN) in target branch of Gitlab repository [\`${file.fileCnPath}\`](${file.fileUrlRepoMasterCN}). Please update \`${file.fileCnPath}\` into this MR.`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// File failed to fetch, probably does not exist in the target branch
|
||||
partMessages.push(
|
||||
`- file \`${file.fileCnPath}\` probably doesn't exist in this MR or in the GitLab repo. Please add \`${file.fileCnPath}\` into this MR.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Create a report with found issues with documents in MR
|
||||
createReport();
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, 'Passed');
|
||||
|
||||
/**
|
||||
* Generates an object that represents the relationships between files in two different languages found in this MR.
|
||||
*
|
||||
* @param {string[]} docsFilesEN - An array of file paths for documents in English.
|
||||
* @param {string[]} docsFilesCN - An array of file paths for documents in Chinese.
|
||||
* @returns {Object} An object with the following properties:
|
||||
* - bothFilesInMr: An array of objects that represent files that found in MR in both languages. Each object has the following properties:
|
||||
* - fileName: The name of the file.
|
||||
* - fileEnPath: The path to the file in English.
|
||||
* - fileCnPath: The path to the file in Chinese.
|
||||
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
|
||||
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
|
||||
* - onlyCnFilesInMr: An array of objects that represent files that only found in MR in English. Each object has the following properties:
|
||||
* - fileName: The name of the file.
|
||||
* - fileEnPath: The path to the file in English.
|
||||
* - fileCnPath: The FUTURE path to the file in Chinese.
|
||||
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
|
||||
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
|
||||
* - onlyEnFilesInMr: An array of objects that represent files that only found in MR in Chinese. Each object has the following properties:
|
||||
* - fileName: The name of the file.
|
||||
* - fileEnPath: The FUTURE path to the file in English.
|
||||
* - fileCnPath: The path to the file in Chinese.
|
||||
* - fileUrlRepoEN: The URL link to MR branch path to the file in English.
|
||||
* - fileUrlRepoCN: The URL link to MR branch path to the file in Chinese.
|
||||
*/
|
||||
function parseMrDocsFiles(allMrFiles) {
|
||||
const path = require("path");
|
||||
const mrBranch = danger.gitlab.mr.source_branch;
|
||||
|
||||
const docsEnFilesMrPath = allMrFiles.filter((file) =>
|
||||
file.startsWith("docs/en")
|
||||
); // Filter all English doc files in MR
|
||||
const docsCnFilesMrPath = allMrFiles.filter((file) =>
|
||||
file.startsWith("docs/zh_CN")
|
||||
); // Filter all Chinese doc files in MR
|
||||
|
||||
const docsEnFileNames = docsEnFilesMrPath.map((filePath) =>
|
||||
path.basename(filePath)
|
||||
); // Get (base) file names for English docs
|
||||
const docsCnFileNames = docsCnFilesMrPath.map((filePath) =>
|
||||
path.basename(filePath)
|
||||
); // Get (base) file names for Chinese docs
|
||||
|
||||
const bothFileNames = docsEnFileNames.filter((fileName) =>
|
||||
docsCnFileNames.includes(fileName)
|
||||
); // Get file names that are common to both English and Chinese docs
|
||||
const onlyEnFileNames = docsEnFileNames.filter(
|
||||
(fileName) => !docsCnFileNames.includes(fileName)
|
||||
); // Get file names that are only present in English version
|
||||
const onlyCnFileNames = docsCnFileNames.filter(
|
||||
(fileName) => !docsEnFileNames.includes(fileName)
|
||||
); // Get file names that are only present in Chinese version
|
||||
|
||||
return {
|
||||
bothFilesInMr: bothFileNames.map((fileName) => {
|
||||
const fileEnPath =
|
||||
docsEnFilesMrPath[docsEnFileNames.indexOf(fileName)];
|
||||
const fileCnPath =
|
||||
docsCnFilesMrPath[docsCnFileNames.indexOf(fileName)];
|
||||
|
||||
return {
|
||||
fileName,
|
||||
fileEnPath,
|
||||
fileCnPath,
|
||||
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
|
||||
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
|
||||
};
|
||||
}),
|
||||
onlyEnFilesInMr: onlyEnFileNames.map((fileName) => {
|
||||
const fileEnPath =
|
||||
docsEnFilesMrPath[docsEnFileNames.indexOf(fileName)];
|
||||
const fileCnPath = fileEnPath.replace("en", "zh_CN"); // Generate future CN file path
|
||||
|
||||
return {
|
||||
fileName,
|
||||
fileEnPath,
|
||||
fileCnPath,
|
||||
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
|
||||
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
|
||||
};
|
||||
}),
|
||||
onlyCnFilesInMr: onlyCnFileNames.map((fileName) => {
|
||||
const fileCnPath =
|
||||
docsCnFilesMrPath[docsCnFileNames.indexOf(fileName)];
|
||||
const fileEnPath = fileCnPath.replace("zh_CN", "en"); // Generate future EN file path
|
||||
|
||||
return {
|
||||
fileName,
|
||||
fileEnPath,
|
||||
fileCnPath,
|
||||
fileUrlRepoEN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileEnPath}`,
|
||||
fileUrlRepoCN: `${process.env.DANGER_GITLAB_HOST}/${pathProject}/-/blob/${mrBranch}/${fileCnPath}`,
|
||||
};
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the contents of a file from GitLab using the GitLab API.
|
||||
*
|
||||
* @param {string} filePath - The path of the file to retrieve.
|
||||
* @param {string} branch - The branch where the file is located.
|
||||
* @returns {string|null} - The contents of the file, with any trailing new lines trimmed, or null if the file cannot be retrieved.
|
||||
*/
|
||||
async function getContentFileInGitlab(filePath, branch) {
|
||||
const axios = require("axios");
|
||||
|
||||
const encFilePath = encodeURIComponent(filePath);
|
||||
const encBranch = encodeURIComponent(branch);
|
||||
const urlApi = `${process.env.DANGER_GITLAB_API_BASE_URL}/projects/${danger.gitlab.mr.project_id}/repository/files/${encFilePath}/raw?ref=${encBranch}`;
|
||||
|
||||
try {
|
||||
const response = await axios.get(urlApi, {
|
||||
headers: {
|
||||
"Private-Token": process.env.DANGER_GITLAB_API_TOKEN,
|
||||
},
|
||||
});
|
||||
return response.data.trim(); // Trim trailing new line
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the contents of a file in a DangerJS merge request object.
|
||||
*
|
||||
* @param {string} filePath - The path of the file to retrieve.
|
||||
* @returns {string|null} - The contents of the file, with any trailing new lines trimmed, or null if the file cannot be retrieved.
|
||||
*/
|
||||
async function getContentFileInMR(filePath) {
|
||||
try {
|
||||
const content = await danger.git.diffForFile(filePath);
|
||||
const fileContentAfter = content.after.trim(); // Trim trailing new lines
|
||||
return fileContentAfter;
|
||||
} catch (error) {
|
||||
console.error(`Error while getting file content MR: ${error}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a compiled report for found documentation issues in the current MR and alerts the Documentation team if there are any "needs translation" labels present.
|
||||
*
|
||||
* Report if documentation labels have been added by mistake.
|
||||
*/
|
||||
function createReport() {
|
||||
const mrLabels = danger.gitlab.mr.labels; // Get MR labels
|
||||
const regexTranslationLabel = /needs translation:/i;
|
||||
|
||||
const translationLabelsPresent = mrLabels.some((label) =>
|
||||
regexTranslationLabel.test(label)
|
||||
); // Check if any of MR labels are "needs translation: XX"
|
||||
|
||||
// No docs issues found in MR, but translation labels have been added anyway
|
||||
if (!partMessages.length && translationLabelsPresent) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
// Only English docs has been changed
|
||||
if (changesInDocsEN && !changesInDocsCH) {
|
||||
if (!mrLabels.includes("needs translation: CN")) {
|
||||
return warn(
|
||||
`Please remove the \`needs translation: XX\` labels. For documents that need to translate from scratch, Doc team will translate them in the future. For the current stage, we only focus on updating exiting EN and CN translation to make them in sync.`
|
||||
"The updated documentation will need to be translated into Chinese, please add the MR label `needs translation: CN`"
|
||||
);
|
||||
}
|
||||
|
||||
// Docs issues found in this MR
|
||||
partMessages.sort();
|
||||
let dangerMessage = `Some of the documentation files in this MR seem to have translations issues:\n${partMessages.join(
|
||||
"\n"
|
||||
)}\n`;
|
||||
|
||||
if (partMessages.length) {
|
||||
if (!translationLabelsPresent) {
|
||||
dangerMessage += `
|
||||
\nWhen synchronizing the EN and CN versions, please follow the [Documentation Code](https://docs.espressif.com/projects/esp-idf/zh_CN/latest/esp32/contribute/documenting-code.html#standardize-document-format). The total number of lines of EN and CN should be same.\n
|
||||
\nIf you have difficulty in providing translation, you can contact Documentation team by adding <kbd>needs translation: CN</kbd> or <kbd>needs translation: EN</kbd> labels into this MR and retrying Danger CI job. The documentation team will be automatically notified and will help you with the translations before the merge.\n`;
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(dangerMessage); // no "needs translation: XX" labels in MR; report issues as warn
|
||||
} else {
|
||||
dangerMessage += `\nTranslation labels <kbd>needs translation: CN</kbd> or <kbd>needs translation: EN</kbd> were added - this will automatically notify the Documentation team to help you with translation issues.`;
|
||||
recordRuleExitStatus(ruleName, 'Passed (with suggestions)');
|
||||
return message(dangerMessage); // "needs translation: XX" labels were found in MR and Docs team was notified; report issues as info
|
||||
}
|
||||
}
|
||||
// Only Chinese docs have been changed
|
||||
if (!changesInDocsEN && changesInDocsCH) {
|
||||
if (!mrLabels.includes("needs translation: EN")) {
|
||||
return warn(
|
||||
"The updated documentation will need to be translated into English, please add the MR label `needs translation: EN`"
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,22 +1,15 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR is too large (more than 1000 lines of changes)
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
module.exports = async function () {
|
||||
const ruleName = "Merge request size (number of changed lines)";
|
||||
const bigMrLinesOfCodeThreshold = 1000;
|
||||
const totalLines = await danger.git.linesOfCode();
|
||||
|
||||
if (totalLines > bigMrLinesOfCodeThreshold) {
|
||||
recordRuleExitStatus(ruleName, "Passed (with suggestions)");
|
||||
return message(
|
||||
`This MR seems to be quite large (total lines of code: ${totalLines}), you might consider splitting it into smaller MRs`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Throw Danger WARN if branch name contains more than one slash or uppercase letters
|
||||
*
|
||||
* @dangerjs INFO
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = "Source branch name";
|
||||
const sourceBranch = danger.gitlab.mr.source_branch;
|
||||
|
||||
// Check if the source branch name contains more than one slash
|
||||
const slashCount = (sourceBranch.match(/\//g) || []).length;
|
||||
if (slashCount > 1) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
`The source branch name \`${sourceBranch}\` contains more than one slash. This can cause troubles with git sync. Please rename the branch.`
|
||||
);
|
||||
}
|
||||
|
||||
// Check if the source branch name contains any uppercase letters
|
||||
if (sourceBranch !== sourceBranch.toLowerCase()) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
`The source branch name \`${sourceBranch}\` contains uppercase letters. This can cause troubles on case-insensitive file systems (macOS). Please use only lowercase letters.`
|
||||
);
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
};
|
||||
@@ -1,31 +1,22 @@
|
||||
const { recordRuleExitStatus } = require("./configParameters.js");
|
||||
|
||||
/**
|
||||
* Check if MR Title contains prefix "WIP: ...".
|
||||
*
|
||||
* @dangerjs WARN
|
||||
*/
|
||||
module.exports = function () {
|
||||
const ruleName = 'Merge request not in Draft or WIP state';
|
||||
const mrTitle = danger.gitlab.mr.title;
|
||||
const mrTitle = danger.gitlab.mr.title;
|
||||
const regexes = [
|
||||
{ prefix: "WIP", regex: /^WIP:/i },
|
||||
{ prefix: "W.I.P", regex: /^W\.I\.P/i },
|
||||
{ prefix: "[WIP]", regex: /^\[WIP/i },
|
||||
{ prefix: "[W.I.P]", regex: /^\[W\.I\.P/i },
|
||||
{ prefix: "(WIP)", regex: /^\(WIP/i },
|
||||
{ prefix: "(W.I.P)", regex: /^\(W\.I\.P/i },
|
||||
{ prefix: 'WIP', regex: /^WIP:/i },
|
||||
{ prefix: 'W.I.P', regex: /^W\.I\.P/i },
|
||||
{ prefix: '[WIP]', regex: /^\[WIP/i },
|
||||
{ prefix: '[W.I.P]', regex: /^\[W\.I\.P/i },
|
||||
{ prefix: '(WIP)', regex: /^\(WIP/i },
|
||||
{ prefix: '(W.I.P)', regex: /^\(W\.I\.P/i },
|
||||
];
|
||||
|
||||
for (const item of regexes) {
|
||||
if (item.regex.test(mrTitle)) {
|
||||
recordRuleExitStatus(ruleName, "Failed");
|
||||
return warn(
|
||||
`Please remove the \`${item.prefix}\` prefix from the MR name before merging this MR.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// At this point, the rule has passed
|
||||
recordRuleExitStatus(ruleName, "Passed");
|
||||
for (const item of regexes) {
|
||||
if (item.regex.test(mrTitle)) {
|
||||
return warn(`Please remove the \`${item.prefix}\` prefix from the MR name before merging this MR.`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
.gitlab/dangerjs/package-lock.json (generated): 2745 changes (file diff suppressed because it is too large)
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"name": "dangerjs-esp-idf",
|
||||
"description": "Merge request automatic linter",
|
||||
"main": "dangerfile.js",
|
||||
"dependencies": {
|
||||
"danger": "^11.2.3",
|
||||
"axios": "^1.3.3",
|
||||
"langchain": "^0.0.53",
|
||||
"openai-gpt-token-counter": "^1.0.3",
|
||||
"@commitlint/lint": "^13.1.0"
|
||||
}
|
||||
}
|
||||
.gitmodules (vendored): 57 changes
@@ -2,26 +2,6 @@
|
||||
# All the relative URL paths are intended to be GitHub ones
|
||||
# For Espressif's public projects please use '../../espressif/proj', not a '../proj'
|
||||
#
|
||||
# Submodules SBOM information
|
||||
# ---------------------------
|
||||
# Submodules, which are used directly and not forked into espressif namespace should
|
||||
# contain SBOM information here. Other submodules should have the SBOM manifest file
|
||||
# included in the root of their project's repository.
|
||||
#
|
||||
# The sbom-hash entry records the submodule's checkout SHA as presented in git-tree
|
||||
# commit object. For example spiffs submodule
|
||||
#
|
||||
# $ git ls-tree HEAD components/spiffs/spiffs
|
||||
# 160000 commit 0dbb3f71c5f6fae3747a9d935372773762baf852 components/spiffs/spiffs
|
||||
#
|
||||
# The hash can be also obtained with git submodule command
|
||||
#
|
||||
# $ git submodule status components/spiffs/spiffs
|
||||
# 0dbb3f71c5f6fae3747a9d935372773762baf852 components/spiffs/spiffs (0.2-255-g0dbb3f71c5f6)
|
||||
#
|
||||
# The submodule SHA recorded here has to match with SHA, which is presented in git-tree.
|
||||
# This is checked by CI. Also please don't forget to update the submodule version
|
||||
# if you are changing the sbom-hash. This is important for SBOM generation.
|
||||
|
||||
[submodule "components/bt/controller/lib_esp32"]
|
||||
path = components/bt/controller/lib_esp32
|
||||
@@ -30,31 +10,14 @@
|
||||
[submodule "components/bootloader/subproject/components/micro-ecc/micro-ecc"]
|
||||
path = components/bootloader/subproject/components/micro-ecc/micro-ecc
|
||||
url = ../../kmackay/micro-ecc.git
|
||||
sbom-version = 1.1
|
||||
sbom-cpe = cpe:2.3:a:micro-ecc_project:micro-ecc:{}:*:*:*:*:*:*:*
|
||||
sbom-supplier = Person: Ken MacKay
|
||||
sbom-url = https://github.com/kmackay/micro-ecc
|
||||
sbom-description = A small and fast ECDH and ECDSA implementation for 8-bit, 32-bit, and 64-bit processors
|
||||
sbom-hash = 24c60e243580c7868f4334a1ba3123481fe1aa48
|
||||
|
||||
[submodule "components/spiffs/spiffs"]
|
||||
path = components/spiffs/spiffs
|
||||
url = ../../pellepl/spiffs.git
|
||||
sbom-version = 0.2-255-g0dbb3f71c5f6
|
||||
sbom-supplier = Person: Peter Andersson
|
||||
sbom-url = https://github.com/pellepl/spiffs
|
||||
sbom-description = Wear-leveled SPI flash file system for embedded devices
|
||||
sbom-hash = 0dbb3f71c5f6fae3747a9d935372773762baf852
|
||||
|
||||
[submodule "components/json/cJSON"]
|
||||
path = components/json/cJSON
|
||||
url = ../../DaveGamble/cJSON.git
|
||||
sbom-version = 1.7.17
|
||||
sbom-cpe = cpe:2.3:a:cjson_project:cjson:{}:*:*:*:*:*:*:*
|
||||
sbom-supplier = Person: Dave Gamble
|
||||
sbom-url = https://github.com/DaveGamble/cJSON
|
||||
sbom-description = Ultralightweight JSON parser in ANSI C
|
||||
sbom-hash = 87d8f0961a01bf09bef98ff89bae9fdec42181ee
|
||||
|
||||
[submodule "components/mbedtls/mbedtls"]
|
||||
path = components/mbedtls/mbedtls
|
||||
@@ -71,21 +34,10 @@
|
||||
[submodule "components/protobuf-c/protobuf-c"]
|
||||
path = components/protobuf-c/protobuf-c
|
||||
url = ../../protobuf-c/protobuf-c.git
|
||||
sbom-version = 1.4.1
|
||||
sbom-cpe = cpe:2.3:a:protobuf-c_project:protobuf-c:{}:*:*:*:*:*:*:*
|
||||
sbom-supplier = Organization: protobuf-c community <https://groups.google.com/g/protobuf-c>
|
||||
sbom-url = https://github.com/protobuf-c/protobuf-c
|
||||
sbom-description = Protocol Buffers implementation in C
|
||||
sbom-hash = abc67a11c6db271bedbb9f58be85d6f4e2ea8389
|
||||
|
||||
[submodule "components/unity/unity"]
|
||||
path = components/unity/unity
|
||||
url = ../../ThrowTheSwitch/Unity.git
|
||||
sbom-version = v2.4.3-51-g7d2bf62b7e6a
|
||||
sbom-supplier = Organization: ThrowTheSwitch community <http://www.throwtheswitch.org>
|
||||
sbom-url = https://github.com/ThrowTheSwitch/Unity
|
||||
sbom-description = Simple Unit Testing for C
|
||||
sbom-hash = 7d2bf62b7e6afaf38153041a9d53c21aeeca9a25
|
||||
|
||||
[submodule "components/bt/host/nimble/nimble"]
|
||||
path = components/bt/host/nimble/nimble
|
||||
@@ -98,11 +50,6 @@
|
||||
[submodule "components/cmock/CMock"]
|
||||
path = components/cmock/CMock
|
||||
url = ../../ThrowTheSwitch/CMock.git
|
||||
sbom-version = v2.5.2-2-geeecc49ce8af
|
||||
sbom-supplier = Organization: ThrowTheSwitch community <http://www.throwtheswitch.org>
|
||||
sbom-url = https://github.com/ThrowTheSwitch/CMock
|
||||
sbom-description = CMock - Mock/stub generator for C
|
||||
sbom-hash = eeecc49ce8af123cf8ad40efdb9673e37b56230f
|
||||
|
||||
[submodule "components/openthread/openthread"]
|
||||
path = components/openthread/openthread
|
||||
@@ -143,7 +90,3 @@
|
||||
[submodule "components/esp_coex/lib"]
|
||||
path = components/esp_coex/lib
|
||||
url = ../../espressif/esp-coex-lib.git
|
||||
|
||||
[submodule "components/bt/esp_ble_mesh/lib/lib"]
|
||||
path = components/bt/esp_ble_mesh/lib/lib
|
||||
url = ../../espressif/esp-ble-mesh-lib.git
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
|
||||
default_stages: [commit]
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.0.1
|
||||
@@ -30,19 +28,13 @@ repos:
|
||||
- id: mixed-line-ending
|
||||
args: ['-f=lf']
|
||||
- id: double-quote-string-fixer
|
||||
- id: no-commit-to-branch
|
||||
name: Do not use more than one slash in the branch name
|
||||
args: ['--pattern', '^[^/]*/[^/]*/']
|
||||
- id: no-commit-to-branch
|
||||
name: Do not use uppercase letters in the branch name
|
||||
args: ['--pattern', '^[^A-Z]*[A-Z]']
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 5.0.4
|
||||
hooks:
|
||||
- id: flake8
|
||||
args: ['--config=.flake8', '--tee', '--benchmark']
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: 5.12.0 # python 3.8 compatible
|
||||
rev: 5.11.5 # python 3.7 compatible
|
||||
hooks:
|
||||
- id: isort
|
||||
name: isort (python)
|
||||
@@ -68,8 +60,6 @@ repos:
|
||||
name: Validate Kconfig files
|
||||
entry: tools/ci/check_kconfigs.py
|
||||
language: python
|
||||
additional_dependencies:
|
||||
- esp-idf-kconfig
|
||||
files: '^Kconfig$|Kconfig.*$'
|
||||
- id: check-deprecated-kconfigs-options
|
||||
name: Check if any Kconfig Options Deprecated
|
||||
@@ -137,14 +127,6 @@ repos:
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
pass_filenames: false
|
||||
- id: check-rules-components-patterns
|
||||
name: check patterns-build_components in rules.yml
|
||||
entry: tools/ci/check_rules_components_patterns.py
|
||||
language: python
|
||||
files: 'components/.+|.gitlab/ci/rules.yml'
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
pass_filenames: false
|
||||
- id: check-generated-soc-caps-kconfig
|
||||
name: Check soc caps kconfig files are generated (based on components/soc/IDF_TARGET/include/soc/soc_caps.h)
|
||||
entry: tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py -d 'components/soc/*/include/soc/' 'components/esp_rom/*/' 'components/spi_flash/*/'
|
||||
@@ -161,7 +143,7 @@ repos:
|
||||
require_serial: true
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
- idf_build_apps~=1.0
|
||||
- idf_build_apps
|
||||
- id: sort-build-test-rules-ymls
|
||||
name: sort .build-test-rules.yml files
|
||||
entry: tools/ci/check_build_test_rules.py sort-yaml
|
||||
@@ -170,55 +152,13 @@ repos:
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
- ruamel.yaml
|
||||
- id: check-build-test-rules-path-exists
|
||||
name: check path in .build-test-rules.yml exists
|
||||
entry: tools/ci/check_build_test_rules.py check-exist
|
||||
language: python
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
always_run: true
|
||||
pass_filenames: false
|
||||
require_serial: true
|
||||
- id: cleanup-ignore-lists
|
||||
name: Remove non-existing patterns from ignore lists
|
||||
entry: tools/ci/cleanup_ignore_lists.py
|
||||
language: python
|
||||
always_run: true
|
||||
require_serial: true
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.0.1
|
||||
hooks:
|
||||
- id: file-contents-sorter
|
||||
files: 'tools\/ci\/(executable-list\.txt|mypy_ignore_list\.txt|check_copyright_ignore\.txt)'
|
||||
- repo: https://github.com/espressif/check-copyright/
|
||||
rev: v1.0.3
|
||||
rev: v1.0.1
|
||||
hooks:
|
||||
- id: check-copyright
|
||||
args: ['--ignore', 'tools/ci/check_copyright_ignore.txt', '--config', 'tools/ci/check_copyright_config.yaml']
|
||||
- repo: https://github.com/espressif/conventional-precommit-linter
|
||||
rev: v1.2.1
|
||||
hooks:
|
||||
- id: conventional-precommit-linter
|
||||
stages: [commit-msg]
|
||||
- repo: https://github.com/espressif/astyle_py.git
|
||||
rev: v1.0.5
|
||||
hooks:
|
||||
- id: astyle_py
|
||||
# If you are modifying astyle version, update tools/format.sh as well
|
||||
args: ['--astyle-version=3.4.7', '--rules=tools/ci/astyle-rules.yml']
|
||||
- repo: https://github.com/shellcheck-py/shellcheck-py
|
||||
rev: v0.9.0.5
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
name: shellcheck bash
|
||||
args: ['--shell', 'bash', '-x']
|
||||
files: 'install.sh|export.sh'
|
||||
- id: shellcheck
|
||||
name: shellcheck dash (export.sh)
|
||||
args: ['--shell', 'dash', '-x']
|
||||
files: 'export.sh'
|
||||
- repo: https://github.com/espressif/esp-idf-sbom.git
|
||||
rev: v0.13.0
|
||||
hooks:
|
||||
- id: validate-sbom-manifest
|
||||
stages: [post-commit]
|
||||
|
||||
.pylintrc (820 lines changed)
@@ -1,77 +1,27 @@
|
||||
[MAIN]
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
|
||||
# in a server-like mode.
|
||||
clear-cache-post-run=no
|
||||
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
[MASTER]
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
# Specify a score threshold to be exceeded before program exits with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\\' represents the directory delimiter on Windows systems,
|
||||
# it can't be used as an escape character.
|
||||
ignore-paths=
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
# number of processors available to use.
|
||||
jobs=1
|
||||
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
@@ -86,19 +36,6 @@ load-plugins=
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.8
|
||||
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# Add paths to the list of the source roots. Supports globbing patterns. The
|
||||
# source root is an absolute path or a path relative to the current working
|
||||
# directory used to determine a package namespace for modules located under the
|
||||
# source root.
|
||||
source-roots=
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
@@ -107,8 +44,348 @@ suggestion-mode=yes
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
|
||||
confidence=
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=print-statement,
|
||||
parameter-unpacking,
|
||||
unpacking-in-except,
|
||||
old-raise-syntax,
|
||||
backtick,
|
||||
long-suffix,
|
||||
old-ne-operator,
|
||||
old-octal-literal,
|
||||
import-star-module-level,
|
||||
non-ascii-bytes-literal,
|
||||
raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
apply-builtin,
|
||||
basestring-builtin,
|
||||
buffer-builtin,
|
||||
cmp-builtin,
|
||||
coerce-builtin,
|
||||
execfile-builtin,
|
||||
file-builtin,
|
||||
long-builtin,
|
||||
raw_input-builtin,
|
||||
reduce-builtin,
|
||||
standarderror-builtin,
|
||||
unicode-builtin,
|
||||
xrange-builtin,
|
||||
coerce-method,
|
||||
delslice-method,
|
||||
getslice-method,
|
||||
setslice-method,
|
||||
no-absolute-import,
|
||||
old-division,
|
||||
dict-iter-method,
|
||||
dict-view-method,
|
||||
next-method-called,
|
||||
metaclass-assignment,
|
||||
indexing-exception,
|
||||
raising-string,
|
||||
reload-builtin,
|
||||
oct-method,
|
||||
hex-method,
|
||||
nonzero-method,
|
||||
cmp-method,
|
||||
input-builtin,
|
||||
round-builtin,
|
||||
intern-builtin,
|
||||
unichr-builtin,
|
||||
map-builtin-not-iterating,
|
||||
zip-builtin-not-iterating,
|
||||
range-builtin-not-iterating,
|
||||
filter-builtin-not-iterating,
|
||||
using-cmp-argument,
|
||||
eq-without-hash,
|
||||
div-method,
|
||||
idiv-method,
|
||||
rdiv-method,
|
||||
exception-message-attribute,
|
||||
invalid-str-codec,
|
||||
sys-max-int,
|
||||
bad-python3-import,
|
||||
deprecated-string-function,
|
||||
deprecated-str-translate-call,
|
||||
deprecated-itertools-function,
|
||||
deprecated-types-field,
|
||||
next-method-defined,
|
||||
dict-items-not-iterating,
|
||||
dict-keys-not-iterating,
|
||||
dict-values-not-iterating,
|
||||
deprecated-operator-function,
|
||||
deprecated-urllib-function,
|
||||
xreadlines-attribute,
|
||||
deprecated-sys-function,
|
||||
exception-escape,
|
||||
comprehension-escape,
|
||||
missing-function-docstring, # Modified since here, include this line
|
||||
missing-class-docstring,
|
||||
missing-module-docstring,
|
||||
wrong-import-order,
|
||||
invalid-name,
|
||||
too-few-public-methods,
|
||||
too-many-locals,
|
||||
bad-super-call, # since we still haven't drop python2 support
|
||||
too-many-nested-blocks,
|
||||
too-many-branches,
|
||||
too-many-statements,
|
||||
ungrouped-imports, # since we have isort in pre-commit
|
||||
no-name-in-module, # since we have flake8 to check this
|
||||
too-many-instance-attributes,
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
|
||||
# which contain the number of messages in each category, as well as 'statement'
|
||||
# which is the total number of statements analyzed. This score is used by the
|
||||
# global evaluation report (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
#msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=ttfw_idf.idf_example_test,ttfw_idf.idf_unit_test,ttfw_idf.idf_custom_test,ttfw_idf.idf_component_unit_test
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. Available dictionaries: none. To make it work,
|
||||
# install the python-enchant package.
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=160
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# List of optional constructs for which whitespace checking is disabled. `dict-
|
||||
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
|
||||
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
|
||||
# `empty-line` allows space-only lines.
|
||||
no-space-check=trailing-comma,
|
||||
dict-separator
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
#notes-rgx=
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=no
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
|
||||
|
||||
[BASIC]
|
||||
@@ -117,15 +394,13 @@ unsafe-load-any-extension=no
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
# naming-style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
@@ -145,30 +420,20 @@ bad-names-rgxs=
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
# attribute-naming-style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
# style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
@@ -180,8 +445,7 @@ docstring-min-length=-1
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
# naming-style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
@@ -203,22 +467,21 @@ include-naming-hint=no
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
# inlinevar-naming-style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
# style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
# style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
@@ -234,61 +497,21 @@ no-docstring-rgx=^_
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type alias names. If left empty, type
|
||||
# alias names will be checked with the set naming style.
|
||||
#typealias-rgx=
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
# naming-style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
asyncSetUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# List of regular expressions of class ancestor names to ignore when counting
|
||||
# public methods (see R0903)
|
||||
exclude-too-few-public-methods=
|
||||
|
||||
# List of qualified class names to ignore when counting class parents (see
|
||||
# R0901)
|
||||
ignored-parents=
|
||||
|
||||
# Maximum number of arguments for function / method.
|
||||
max-args=5
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
max-attributes=12
|
||||
|
||||
# Maximum number of boolean expressions in an if statement (see R0916).
|
||||
max-bool-expr=5
|
||||
@@ -315,40 +538,27 @@ max-statements=50
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
[CLASSES]
|
||||
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=builtins.BaseException,builtins.Exception
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,
|
||||
_fields,
|
||||
_replace,
|
||||
_source,
|
||||
_make
|
||||
|
||||
[FORMAT]
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=160
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=cls
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
@@ -357,26 +567,27 @@ single-line-if-stmt=no
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow explicit reexports by alias from a package __init__.
|
||||
allow-reexport-from-package=no
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=optparse,tkinter.tix
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
@@ -390,252 +601,9 @@ known-third-party=enchant
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
[EXCEPTIONS]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then re-enable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
missing-function-docstring, # Modified since here, include this line
|
||||
missing-class-docstring,
|
||||
missing-module-docstring,
|
||||
wrong-import-order,
|
||||
invalid-name,
|
||||
too-few-public-methods,
|
||||
too-many-locals,
|
||||
ungrouped-imports, # since we have isort in pre-commit
|
||||
no-name-in-module, # since we have flake8 to check this
|
||||
too-many-instance-attributes,
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[METHOD_ARGS]
|
||||
|
||||
# List of qualified names (i.e., library.method) which require a timeout
|
||||
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Regular expression of note tags to take in consideration.
|
||||
notes-rgx=
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
||||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit,argparse.parse_error
|
||||
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||
# 'convention', and 'info' which contain the number of messages in each
|
||||
# category, as well as 'statement' which is the total number of statements
|
||||
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||
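As a quick sanity check of the evaluation formula above (the message counts are made-up): a run with 100 analyzed statements, 1 error, 2 warnings, 0 refactor messages, 3 convention messages, and no fatal message would score

$$\max\left(0,\; 10.0 - \frac{5 \cdot 1 + 2 + 0 + 3}{100} \cdot 10\right) = \max(0,\; 10.0 - 1.0) = 9.0,$$

while any fatal message forces the score to 0.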
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
#output-format=
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Comments are removed from the similarity computation
|
||||
ignore-comments=yes
|
||||
|
||||
# Docstrings are removed from the similarity computation
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Imports are removed from the similarity computation
|
||||
ignore-imports=yes
|
||||
|
||||
# Signatures are removed from the similarity computation
|
||||
ignore-signatures=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. No available dictionaries : You need to install
|
||||
# both the python package and the system dependency for enchant to work..
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should be considered directives if they
|
||||
# appear at the beginning of a comment and should not be checked.
|
||||
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[STRING]
|
||||
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
||||
# List of decorators that produce context managers, such as
|
||||
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||
# produce valid context managers.
|
||||
contextmanager-decorators=contextlib.contextmanager
|
||||
|
||||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=
|
||||
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
# can return multiple potential results while evaluating a Python object, but
|
||||
# some branches might not be evaluated, which results in partial inference. In
|
||||
# that case, it might be useful to still emit no-member and other checks for
|
||||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of symbolic message names to ignore for Mixin members.
|
||||
ignored-checks-for-mixins=no-member,
|
||||
not-async-context-manager,
|
||||
not-context-manager,
|
||||
attribute-defined-outside-init
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
missing-member-hint=yes
|
||||
|
||||
# The minimum edit distance a name should have in order to be considered a
|
||||
# similar match for a missing member name.
|
||||
missing-member-hint-distance=1
|
||||
|
||||
# The total number of similar names that should be taken in consideration when
|
||||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# Regex pattern to define which classes are considered mixins.
|
||||
mixin-class-rgx=.*[Mm]ixin
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of names allowed to shadow builtins
|
||||
allowed-redefined-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
init-import=no
|
||||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "BaseException, Exception".
|
||||
overgeneral-exceptions=BaseException,
|
||||
Exception
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
# This is shellcheck config file
|
||||
# Files that are checked: install.sh, export.sh
|
||||
|
||||
# Do not complain about variables determined at runtime (IDF_PATH)
|
||||
disable=SC1090
|
||||
.vale.ini (119 lines changed)
@@ -1,119 +0,0 @@
|
||||
###################
|
||||
### Vale Config ###
|
||||
###################
|
||||
|
||||
# This is a Vale linter configuration file.
|
||||
# - Repo: esp-idf
|
||||
# - Based on Default config: v0-1-1
|
||||
# It lists all necessary parameters to configure Vale for your project.
|
||||
# For official documentation on all config settings, see
|
||||
# https://vale.sh/docs/topics/config
|
||||
|
||||
|
||||
##############
|
||||
### Global ###
|
||||
##############
|
||||
|
||||
# This section lists core settings applying to Vale itself.
|
||||
|
||||
|
||||
# Specify path to external resources (e.g., styles and vocab files).
|
||||
# The path value may be absolute or relative to this configuration file.
|
||||
StylesPath = .vale/styles
|
||||
|
||||
|
||||
# Specify the minimum alert severity that Vale will report.
|
||||
MinAlertLevel = suggestion # "suggestion", "warning", or "error"
|
||||
|
||||
|
||||
# Specify vocabulary for special treatment.
|
||||
# Create a folder in <StylesPath>/Vocab/<name>/ and add its name here.
|
||||
# The folder should contain two files:
|
||||
# - accept.txt -- lists words with accepted case-sensitive spelling
|
||||
# - reject.txt -- lists words whose occurrences throw an error
|
||||
# Vocab = Espressif
|
||||
|
||||
|
||||
# Specify the packages to import into your project.
|
||||
# A package is a zip file containing a number of rules (style) written in YAML.
|
||||
# For a list of official packages, see Package Hub at https://vale.sh/hub/
|
||||
# For official documentation on packages, see
|
||||
# https://vale.sh/docs/topics/packages/
|
||||
# Before linting, navigate to your project and run `vale sync` to download
|
||||
# the official packages specified below.
|
||||
# Packages = Package1, Package2, \
|
||||
# https://example.com/path/to/package/Package.zip
|
||||
Packages = Google, Microsoft, RedHat, \
|
||||
https://dl.espressif.com/dl/esp-vale-config/Espressif-latest.zip
|
||||
|
||||
|
||||
###############
|
||||
### Formats ###
|
||||
###############
|
||||
|
||||
# This section enables association of "unknown" formats with the ones
|
||||
# supported by Vale. For official documentation on supported formats, see
|
||||
# https://vale.sh/docs/topics/scoping/
|
||||
[formats]
|
||||
|
||||
# For example, treat MDX files as Markdown files.
|
||||
# mdx = md
|
||||
|
||||
|
||||
################################
|
||||
### Format-specific settings ###
|
||||
################################
|
||||
|
||||
# This section lists the settings that apply to specific file formats
|
||||
# based on their glob pattern.
|
||||
# Settings provided under a more specific glob pattern,
|
||||
# such as [*.{md,txt}] will override those in [*].
|
||||
[*.{md,rst}]
|
||||
|
||||
|
||||
# Enable styles to activate all rules included in them.
|
||||
# BasedOnStyles = Style1, Style2
|
||||
BasedOnStyles = Vale, Espressif-latest
|
||||
|
||||
|
||||
### Deactivate individual rules ###
|
||||
### in enabled styles.
|
||||
# Style1.Rule1 = NO
|
||||
Vale.Repetition = NO
|
||||
Vale.Spelling = NO
|
||||
Espressif-latest.Admonitions = NO
|
||||
Espressif-latest.Contractions = NO
|
||||
Espressif-latest.Monospace = NO
|
||||
Espressif-latest.PascalCamelCase = NO
|
||||
|
||||
|
||||
### Change default severity level ###
|
||||
### of an activated rule.
|
||||
# Choose between "suggestion", "warning", or "error".
|
||||
# Style1.Rule2 = error
|
||||
|
||||
|
||||
### Activate individual rules ###
|
||||
### in non-enabled styles stored in <StylesPath>.
|
||||
# Style1.Rule = YES
|
||||
Google.Gender = YES
|
||||
Google.GenderBias = YES
|
||||
Google.Slang = YES
|
||||
Google.Spacing = YES
|
||||
Microsoft.DateNumbers = YES
|
||||
Microsoft.Ellipses = YES
|
||||
Microsoft.FirstPerson = YES
|
||||
Microsoft.Hyphens = YES
|
||||
Microsoft.Ordinal = YES
|
||||
Microsoft.OxfordComma = YES
|
||||
Microsoft.Percentages = YES
|
||||
Microsoft.RangeTime = YES
|
||||
Microsoft.Semicolon = YES
|
||||
Microsoft.SentenceLength = YES
|
||||
Microsoft.Suspended = YES
|
||||
Microsoft.Units = YES
|
||||
Microsoft.URLFormat = YES
|
||||
Microsoft.We = YES
|
||||
Microsoft.Wordiness = YES
|
||||
RedHat.Contractions = YES
|
||||
RedHat.RepeatedWords = YES
|
||||
@@ -20,11 +20,8 @@ if(NOT BOOTLOADER_BUILD)
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU")
|
||||
list(APPEND compile_options "-freorder-blocks")
|
||||
endif()
|
||||
elseif(CONFIG_COMPILER_OPTIMIZATION_DEBUG)
|
||||
elseif(CONFIG_COMPILER_OPTIMIZATION_DEFAULT)
|
||||
list(APPEND compile_options "-Og")
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND NOT CONFIG_IDF_TARGET_LINUX)
|
||||
list(APPEND compile_options "-fno-shrink-wrap") # Disable shrink-wrapping to reduce binary size
|
||||
endif()
|
||||
elseif(CONFIG_COMPILER_OPTIMIZATION_NONE)
|
||||
list(APPEND compile_options "-O0")
|
||||
elseif(CONFIG_COMPILER_OPTIMIZATION_PERF)
|
||||
@@ -40,9 +37,6 @@ else() # BOOTLOADER_BUILD
|
||||
endif()
|
||||
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_DEBUG)
|
||||
list(APPEND compile_options "-Og")
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "GNU" AND NOT CONFIG_IDF_TARGET_LINUX)
|
||||
list(APPEND compile_options "-fno-shrink-wrap") # Disable shrink-wrapping to reduce binary size
|
||||
endif()
|
||||
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_NONE)
|
||||
list(APPEND compile_options "-O0")
|
||||
elseif(CONFIG_BOOTLOADER_COMPILER_OPTIMIZATION_PERF)
|
||||
@@ -121,8 +115,6 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
list(APPEND compile_options "-Wno-c2x-extensions")
|
||||
# warning on xMPU_SETTINGS for esp32s2 has size 0 for C and 1 for C++
|
||||
list(APPEND compile_options "-Wno-extern-c-compat")
|
||||
# warning: implicit truncation from 'int' to a one-bit wide bit-field changes value from 1 to -1
|
||||
list(APPEND compile_options "-Wno-single-bit-bitfield-constant-conversion")
|
||||
endif()
|
||||
# More warnings may exist in unit tests and example projects.
|
||||
|
||||
@@ -192,13 +184,6 @@ if(CONFIG_COMPILER_DISABLE_GCC12_WARNINGS)
|
||||
"-Wno-use-after-free")
|
||||
endif()
|
||||
|
||||
if(CONFIG_COMPILER_DISABLE_GCC13_WARNINGS)
|
||||
list(APPEND compile_options "-Wno-xor-used-as-pow")
|
||||
list(APPEND c_compile_options "-Wno-enum-int-mismatch")
|
||||
list(APPEND cxx_compile_options "-Wno-self-move"
|
||||
"-Wno-dangling-reference")
|
||||
endif()
|
||||
|
||||
# GCC-specific options
|
||||
if(CMAKE_C_COMPILER_ID STREQUAL "GNU")
|
||||
list(APPEND compile_options "-fstrict-volatile-bitfields"
|
||||
@@ -239,10 +224,6 @@ if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
list(APPEND compile_options "-fno-use-cxa-atexit")
|
||||
endif()
|
||||
|
||||
if(COMPILER_RT_LIB_NAME)
|
||||
list(APPEND link_options "-rtlib=${CONFIG_COMPILER_RT_LIB_NAME}")
|
||||
endif()
|
||||
|
||||
# For the transition period from 32-bit time_t to 64-bit time_t,
|
||||
# auto-detect the size of this type and set corresponding variable.
|
||||
include(CheckTypeSize)
|
||||
|
||||
COMPATIBILITY.md (101 lines changed)
@@ -1,101 +0,0 @@
|
||||
# Compatibility Between ESP-IDF Releases and Revisions of Espressif SoCs
|
||||
|
||||
* [中文版](./COMPATIBILITY_CN.md)
|
||||
|
||||
Espressif keeps improving the performance of its SoCs by providing new chip revisions. However, some of the improvements require special software support, and some of this software support is even mandatory for the chip revisions to run normally.
|
||||
|
||||
This document describes the compatibility between ESP-IDF releases and Espressif SoC revisions.
|
||||
|
||||
NOTE: On release branches, this document may be out of date. Check the [Compatibility file on master](https://github.com/espressif/esp-idf/blob/master/COMPATIBILITY.md) for the most accurate information.
|
||||
|
||||
See [Compatibility Advisory for Chip Revision Numbering Scheme](https://www.espressif.com.cn/sites/default/files/advisory_downloads/AR2022-005%20Compatibility%20Advisory%20for%20Chip%20Revision%20Numbering%20%20Scheme.pdf) on the versioning of Espressif SoC revisions.
|
||||
|
||||
You can run `esptool chip_id` to detect the series and revision of an SoC. See [SoC Errata](https://www.espressif.com.cn/en/support/documents/technical-documents?keys=errata) for more on how to distinguish between chip revisions and on the improvements each revision provides. Run `idf.py --version` to check the current ESP-IDF version.
|
||||
|
||||
## ESP-IDF Support for Different Chip Revisions
|
||||
|
||||
The sections below show the ESP-IDF version requirements for each chip revision. Each chip revision corresponds to specific `Recommended` and `Required` versions of ESP-IDF:
|
||||
|
||||
- `Recommended`: the ESP-IDF version from which you can make use of all the improvements of the chip revision. When running a binary compiled with an ESP-IDF version below the `Recommended` version for a chip revision, the software may not benefit from the bugfixes/features provided by that revision, and the chip will behave almost the same as its previous revision.
|
||||
|
||||
- `Required`: the minimum ESP-IDF version required to run the chip revision normally. A binary compiled with a version below the `Required` version may behave unpredictably.
|
||||
|
||||
Though the software can make use of all the features of a chip revision, if its version is higher than the `Recommended` version of the chip, it is still recommended to use the latest bugfix version of the release branch. The latest bugfix version fixes a number of issues and helps improve product stability.
|
||||
|
||||
For example, if we have a chip, whose `Required`/`Recommended` version of `release/v5.1` branch is `v5.1.2`/`v5.1.4`, and the latest release on that branch is `v5.1.6`. Then the chip will not boot up with ESP-IDF `v5.1`-`v5.1.1` or will have unpredictable behavior, and application may not make use of all benefits of the chip, when running with ESP-IDF `v5.1.2` or `v5.1.3`. Though `v5.1.4` well supports the chip revision, it is still recommended to upgrade ESP-IDF to `v5.1.6`.
|
||||
|
||||
### ESP32
|
||||
|
||||
#### v0.0, v1.0, v3.0
|
||||
|
||||
Supported since initial version of ESP-IDF.
|
||||
|
||||
#### v1.1
|
||||
|
||||
To be added.
|
||||
|
||||
#### v2.0
|
||||
|
||||
To be added.
|
||||
|
||||
#### v3.1
|
||||
|
||||
To be added.
|
||||
|
||||
### ESP32-S2
|
||||
|
||||
#### v0.0
|
||||
|
||||
Supported since ESP-IDF v4.2.
|
||||
|
||||
#### v1.0
|
||||
|
||||
| Release branch | Recommended | Required |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v4.2 | v4.2.3 | v4.2.3 |
|
||||
| release/v4.3 | v4.3.3 | v4.3.3 |
|
||||
| release/v4.4 | v4.4.6 | v4.4.1 |
|
||||
| release/v5.0 | v5.0.4 | v5.0 |
|
||||
| release/v5.1 | v5.1.2 | v5.1 |
|
||||
| release/v5.2 and above | v5.2 | v5.2 |
|
||||
|
||||
### ESP32-C3
|
||||
|
||||
#### v0.2, v0.3
|
||||
|
||||
Supported since ESP-IDF v4.3.
|
||||
|
||||
#### v0.4
|
||||
|
||||
To be added.
|
||||
|
||||
### ESP32-S3
|
||||
|
||||
#### v0.1
|
||||
|
||||
Supported since ESP-IDF v4.4.
|
||||
|
||||
#### v0.2
|
||||
|
||||
To be added.
|
||||
|
||||
### ESP32-C2 & ESP8684
|
||||
|
||||
#### v1.0
|
||||
|
||||
Supported since ESP-IDF v5.0.
|
||||
|
||||
#### v1.1
|
||||
|
||||
To be added.
|
||||
|
||||
#### v1.2
|
||||
|
||||
To be added.
|
||||
|
||||
|
||||
## What If the ESP-IDF Version Is Lower than the `Required` Version?
|
||||
|
||||
Latest ESP-IDF versions can prevent from downloading to, or even execute binaries on unsupported chips. ESP-IDF of versions v4.4.5+, v5.0.1+, v5.1 and above have both esptool download check and bootloader loading check against the chip revision. While ESP-IDF v4.3.5 has only esptool downloading check.
|
||||
|
||||
For earlier ESP-IDF versions without such checking, which is incompatible with the given chip revision, the chips running such versions will have unpredictable behavior.
|
||||
@@ -1,101 +0,0 @@
|
||||
# ESP-IDF 版本与乐鑫芯片版本兼容性
|
||||
|
||||
* [English Version](./COMPATIBILITY.md)
|
||||
|
||||
为不断提高芯片性能,乐鑫会为其芯片发布新版本。但新芯片版本中的某些性能提升依赖特殊的软件支持,有时候新芯片版本必须在一定的软件版本下才能正常运行。
|
||||
|
||||
本文档介绍了具体 ESP-IDF 版本与乐鑫芯片版本之间的兼容性情况。
|
||||
|
||||
注意:各分支上的兼容性文档可能不是最新版本,请参考 [master 分支上的兼容性文件](https://github.com/espressif/esp-idf/blob/master/COMPATIBILITY_CN.md) 以获取最新信息。
|
||||
|
||||
有关乐鑫芯片版本的编码方式,请参考 [关于芯片版本 (Chip Revision) 编码方式的兼容性公告](https://www.espressif.com/sites/default/files/advisory_downloads/AR2022-005%20%E5%85%B3%E4%BA%8E%E8%8A%AF%E7%89%87%E7%89%88%E6%9C%AC%E7%BC%96%E7%A0%81%E6%96%B9%E5%BC%8F%20%28Chip%20Revision%29%20%E7%9A%84%E5%85%BC%E5%AE%B9%E6%80%A7%E5%85%AC%E5%91%8A.pdf)。
|
||||
|
||||
运行 `esptool chip_id` 可查看芯片系列及其版本。有关区分芯片版本及版本改进内容的更多信息,请参考 [芯片勘误表](https://www.espressif.com.cn/zh-hans/support/documents/technical-documents?keys=%E5%8B%98%E8%AF%AF%E8%A1%A8)。运行 `idf.py --version` 可查看当前的 ESP-IDF 版本。
|
||||
|
||||
## ESP-IDF 对各芯片版本的支持
|
||||
|
||||
下文介绍了 ESP-IDF 对各芯片版本的支持情况,每个芯片版本都有对应的 ESP-IDF `推荐版本` 和 `需求版本`:
|
||||
|
||||
- `推荐版本`:表示从该版本的 ESP-IDF 开始,软件可以利用芯片版本提升的性能。如果在该芯片版本上运行低于 `推荐版本` 的 ESP-IDF 来编译二进制文件,软件可能无法利用该芯片版本修复的错误或新增的功能,芯片行为将与其上一版本几乎相同。
|
||||
|
||||
- `需求版本`:表示该芯片版本正常运行所需的最低 ESP-IDF 版本。如果在该芯片版本上运行低于 `需求版本` 的 ESP-IDF 来编译二进制文件,可能会出现不可预测的芯片行为。
|
||||
|
||||
即便使用的软件版本已高于该芯片版本的对应 `推荐版本`,软件已经能够利用该芯片版本的所有功能,我们仍建议用户升级到该发布分支的最新 bugfix 版本。新的 bugfix 版本修复了一些问题,有助于提升产品稳定性。
|
||||
|
||||
例如,对于某一芯片版本,其 `release/v5.1` 分支的 `需求版本` 和 `推荐版本` 分别是 `v5.1.2` 和 `v5.1.4`,而该分支的最新版本是 `v5.1.6`。那么,在使用 ESP-IDF `v5.1` - `v5.1.1` 时,芯片将无法启动,或会出现不可预测的行为,而在使用 ESP-IDF `v5.1.2` 或 `v5.1.3` 时,应用程序可能无法使用芯片的部分性能。此外,虽然 `v5.1.4` 已支持该芯片版本,但仍建议将 ESP-IDF 升级到 `v5.1.6`。
|
||||
|
||||
### ESP32
|
||||
|
||||
#### v0.0、v1.0 和 v3.0
|
||||
|
||||
从最初版本的 ESP-IDF 开始支持。
|
||||
|
||||
#### v1.1
|
||||
|
||||
待更新。
|
||||
|
||||
#### v2.0
|
||||
|
||||
待更新。
|
||||
|
||||
#### v3.1
|
||||
|
||||
待更新。
|
||||
|
||||
### ESP32-S2
|
||||
|
||||
#### v0.0
|
||||
|
||||
从 ESP-IDF v4.2 开始支持。
|
||||
|
||||
#### v1.0
|
||||
|
||||
| 发布分支 | 推荐版本 | 需求版本 |
|
||||
|------------------------|-------------|----------|
|
||||
| release/v4.2 | v4.2.3 | v4.2.3 |
|
||||
| release/v4.3 | v4.3.3 | v4.3.3 |
|
||||
| release/v4.4 | v4.4.6 | v4.4.1 |
|
||||
| release/v5.0 | v5.0.4 | v5.0 |
|
||||
| release/v5.1 | v5.1.2 | v5.1 |
|
||||
| release/v5.2 及以上 | v5.2 | v5.2 |
|
||||
|
||||
### ESP32-C3
|
||||
|
||||
#### v0.2 和 v0.3
|
||||
|
||||
从 ESP-IDF v4.3 开始支持。
|
||||
|
||||
#### v0.4
|
||||
|
||||
待更新。
|
||||
|
||||
### ESP32-S3
|
||||
|
||||
#### v0.1
|
||||
|
||||
从 ESP-IDF v4.4 开始支持。
|
||||
|
||||
#### v0.2
|
||||
|
||||
待更新。
|
||||
|
||||
### ESP32-C2 & ESP8684
|
||||
|
||||
#### v1.0
|
||||
|
||||
从 ESP-IDF v5.0 开始支持。
|
||||
|
||||
#### v1.1
|
||||
|
||||
待更新。
|
||||
|
||||
#### v1.2
|
||||
|
||||
待更新。
|
||||
|
||||
|
||||
## 如果 ESP-IDF 版本低于 `需求版本` 会出现什么情况?
|
||||
|
||||
使用最新的 ESP-IDF 版本时,软件会阻止下载二进制文件到不支持的芯片版本上,甚至可以防止二进制文件在不支持的芯片版本上被执行。v4.4.5+、v5.0.1+、v5.1 及以上版本的 ESP-IDF 都支持针对芯片版本的 esptool 下载检查和引导加载器加载检查,但 ESP-IDF v4.3.5 只支持 esptool 下载检查。
|
||||
|
||||
更早的 ESP-IDF 版本没有此类检查,若与芯片版本不兼容,芯片运行软件时可能会出现不可预测的行为。
|
||||
92
Kconfig
92
Kconfig
@@ -11,44 +11,14 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
default "y"
|
||||
|
||||
config IDF_ENV_FPGA
|
||||
# This option is for internal use only
|
||||
bool
|
||||
option env="IDF_ENV_FPGA"
|
||||
help
|
||||
- This option is for internal use only.
|
||||
- Enabling this option will help enable all FPGA support so as to
|
||||
run ESP-IDF on an FPGA. This can help reproduce some issues that
|
||||
only happens on FPGA condition, or when you have to burn some
|
||||
efuses multiple times.
|
||||
|
||||
config IDF_ENV_BRINGUP
|
||||
bool
|
||||
default "y" if IDF_TARGET_ESP32P4
|
||||
help
|
||||
- This option is ONLY used when doing new chip bringup.
|
||||
- This option will only enable necessary hw / sw settings for running
|
||||
a hello_world application.
|
||||
|
||||
|
||||
config IDF_CI_BUILD
|
||||
bool
|
||||
default y if "$(IDF_CI_BUILD)" = "y" || "$(IDF_CI_BUILD)" = 1
|
||||
|
||||
config IDF_DOC_BUILD
|
||||
bool
|
||||
default y if "$(IDF_DOC_BUILD)" = "y" || "$(IDF_DOC_BUILD)" = 1
|
||||
|
||||
config IDF_TOOLCHAIN
|
||||
# This option records the IDF target when sdkconfig is generated the first time.
|
||||
# It is not updated if environment variable $IDF_TOOLCHAIN changes later, and
|
||||
# the build system is responsible for detecting the mismatch between
|
||||
# CONFIG_IDF_TOOLCHAIN and $IDF_TOOLCHAIN.
|
||||
string
|
||||
default "$IDF_TOOLCHAIN"
|
||||
|
||||
config IDF_TOOLCHAIN_CLANG
|
||||
bool
|
||||
default "y" if IDF_TOOLCHAIN="clang"
|
||||
|
||||
config IDF_TARGET_ARCH_RISCV
|
||||
bool
|
||||
default "n"
|
||||
@@ -70,12 +40,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
string
|
||||
default "$IDF_TARGET"
|
||||
|
||||
config IDF_INIT_VERSION
|
||||
# This option records the IDF version when sdkconfig is generated the first time.
|
||||
# It is not updated if environment variable $IDF_VERSION changes later
|
||||
string
|
||||
default "$IDF_INIT_VERSION"
|
||||
|
||||
config IDF_TARGET_LINUX
|
||||
bool
|
||||
default "y" if IDF_TARGET="linux"
|
||||
@@ -114,11 +78,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
select FREERTOS_UNICORE
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
|
||||
config IDF_TARGET_ESP32P4
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32p4"
|
||||
select IDF_TARGET_ARCH_RISCV
|
||||
|
||||
config IDF_TARGET_ESP32H2
|
||||
bool
|
||||
default "y" if IDF_TARGET="esp32h2"
|
||||
@@ -138,7 +97,6 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
default 0x000C if IDF_TARGET_ESP32C2
|
||||
default 0x000D if IDF_TARGET_ESP32C6
|
||||
default 0x0010 if IDF_TARGET_ESP32H2
|
||||
default 0x0012 if IDF_TARGET_ESP32P4
|
||||
default 0xFFFF
|
||||
|
||||
|
||||
@@ -306,11 +264,11 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
|
||||
choice COMPILER_OPTIMIZATION
|
||||
prompt "Optimization Level"
|
||||
default COMPILER_OPTIMIZATION_DEBUG
|
||||
default COMPILER_OPTIMIZATION_DEFAULT
|
||||
help
|
||||
This option sets compiler optimization level (gcc -O argument) for the app.
|
||||
|
||||
- The "Debug" setting will add the -0g flag to CFLAGS.
|
||||
- The "Default" setting will add the -0g flag to CFLAGS.
|
||||
- The "Size" setting will add the -0s flag to CFLAGS.
|
||||
- The "Performance" setting will add the -O2 flag to CFLAGS.
|
||||
- The "None" setting will add the -O0 flag to CFLAGS.
|
||||
@@ -329,7 +287,7 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
Compiler optimization for the IDF bootloader is set separately,
|
||||
see the BOOTLOADER_COMPILER_OPTIMIZATION setting.
|
||||
|
||||
config COMPILER_OPTIMIZATION_DEBUG
|
||||
config COMPILER_OPTIMIZATION_DEFAULT
|
||||
bool "Debug (-Og)"
|
||||
config COMPILER_OPTIMIZATION_SIZE
|
||||
bool "Optimize for size (-Os)"
|
||||
@@ -529,47 +487,12 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
Enable this option if you use GCC 12 or newer and want to disable warnings which don't appear with
|
||||
GCC 11.
|
||||
|
||||
config COMPILER_DISABLE_GCC13_WARNINGS
|
||||
bool "Disable new warnings introduced in GCC 13"
|
||||
default "n"
|
||||
help
|
||||
Enable this option if you use GCC 13 or newer and want to disable warnings which don't appear with
|
||||
GCC 12.
|
||||
|
||||
config COMPILER_DUMP_RTL_FILES
|
||||
bool "Dump RTL files during compilation"
|
||||
help
|
||||
If enabled, RTL files will be produced during compilation. These files
|
||||
can be used by other tools, for example to calculate call graphs.
|
||||
|
||||
choice COMPILER_RT_LIB
|
||||
prompt "Compiler runtime library"
|
||||
default COMPILER_RT_LIB_CLANGRT if IDF_TOOLCHAIN_CLANG
|
||||
default COMPILER_RT_LIB_HOST if IDF_TARGET_LINUX
|
||||
default COMPILER_RT_LIB_GCCLIB
|
||||
help
|
||||
Select runtime library to be used by compiler.
|
||||
- GCC toolchain supports libgcc only.
|
||||
- Clang allows to choose between libgcc or libclang_rt.
|
||||
- For host builds ("linux" target), uses the default library.
|
||||
|
||||
config COMPILER_RT_LIB_GCCLIB
|
||||
depends on !IDF_TARGET_LINUX
|
||||
bool "libgcc"
|
||||
config COMPILER_RT_LIB_CLANGRT
|
||||
depends on IDF_TOOLCHAIN_CLANG && !IDF_TARGET_LINUX
|
||||
bool "libclang_rt"
|
||||
config COMPILER_RT_LIB_HOST
|
||||
depends on IDF_TARGET_LINUX
|
||||
bool "Host"
|
||||
endchoice
|
||||
|
||||
config COMPILER_RT_LIB_NAME
|
||||
string
|
||||
default "clang_rt.builtins" if COMPILER_RT_LIB_CLANGRT
|
||||
default "gcc" if COMPILER_RT_LIB_GCCLIB
|
||||
default "" if COMPILER_RT_LIB_HOST
|
||||
|
||||
endmenu # Compiler Options
|
||||
|
||||
menu "Component config"
|
||||
@@ -584,10 +507,3 @@ mainmenu "Espressif IoT Development Framework Configuration"
|
||||
|
||||
Note you should still enable a certain experimental feature option to use it, and you
|
||||
should read the corresponding risk warning and known issue list carefully.
|
||||
|
||||
Current experimental feature list:
|
||||
|
||||
- CONFIG_ESPTOOLPY_FLASHFREQ_120M && CONFIG_ESPTOOLPY_FLASH_SAMPLE_MODE_DTR
|
||||
- CONFIG_SPIRAM_SPEED_120M && CONFIG_SPIRAM_MODE_OCT
|
||||
- CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
|
||||
- CONFIG_MBEDTLS_USE_CRYPTO_ROM_IMPL
|
||||
|
||||
21
README.md
21
README.md
@@ -6,7 +6,7 @@ ESP-IDF is the development framework for Espressif SoCs supported on Windows, Li
|
||||
|
||||
# ESP-IDF Release Support Schedule
|
||||
|
||||

|
||||

|
||||
|
||||
- Please read [the support policy](SUPPORT_POLICY.md) and [the documentation](https://docs.espressif.com/projects/esp-idf/en/latest/esp32/versions.html) for more information about ESP-IDF versions.
|
||||
- Please see the [End-of-Life Advisories](https://www.espressif.com/en/support/documents/advisories?keys=&field_type_of_advisory_tid%5B%5D=817) for information about ESP-IDF releases with discontinued support.
|
||||
@@ -15,22 +15,19 @@ ESP-IDF is the development framework for Espressif SoCs supported on Windows, Li
|
||||
|
||||
The following table shows ESP-IDF support of Espressif SoCs where ![alt text][preview] and ![alt text][supported] denote preview status and support, respectively. The preview support is usually limited in time and intended for beta versions of chips. Please use an ESP-IDF release where the desired SoC is already supported.
|
||||
|
||||
|Chip | v4.3 | v4.4 | v5.0 | v5.1 | v5.2 | |
|
||||
|:----------- | :---------------------:| :---------------------:| :---------------------:| :--------------------: | :--------------------: | :----------------------------------------------------------|
|
||||
|Chip | v4.2 | v4.3 | v4.4 | v5.0 | v5.1 | |
|
||||
|:----------- | :---------------------:| :---------------------:| :---------------------:| :---------------------:| :--------------------: | :----------------------------------------------------------|
|
||||
|ESP32 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S2 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-C3 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S3 | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_S3) |
|
||||
|ESP32-C2 | | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32-C2) |
|
||||
|ESP32-C6 | | | | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_C6) |
|
||||
|ESP32-H2 | | | | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_H2) |
|
||||
|ESP32-P4 | | | | | ![alt text][preview] | [Announcement](https://www.espressif.com/en/news/ESP32-P4) |
|
||||
|ESP32-C3 | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S3 | | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_S3) |
|
||||
|ESP32-C2 | | | | ![alt text][supported] | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32-C2) |
|
||||
|ESP32-C6 | | | | | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_C6) |
|
||||
|ESP32-H2 | | | | | ![alt text][supported] | [Announcement](https://www.espressif.com/en/news/ESP32_H2) |
|
||||
|
||||
[supported]: https://img.shields.io/badge/-supported-green "supported"
|
||||
[preview]: https://img.shields.io/badge/-preview-orange "preview"
|
||||
|
||||
There are variants of revisions for a series of chips. See [Compatibility Between ESP-IDF Releases and Revisions of Espressif SoCs](https://github.com/espressif/esp-idf/blob/master/COMPATIBILITY.md) for the details of the compatibility between ESP-IDF and chip revisions.
|
||||
|
||||
Espressif SoCs released before 2016 (ESP8266 and ESP8285) are supported by [RTOS SDK](https://github.com/espressif/ESP8266_RTOS_SDK) instead.
|
||||
|
||||
# Developing With ESP-IDF
|
||||
@@ -121,8 +118,6 @@ This can be combined with other targets, ie `idf.py -p PORT erase-flash flash` w
|
||||
|
||||
* Documentation for the latest version: https://docs.espressif.com/projects/esp-idf/. This documentation is built from the [docs directory](docs) of this repository.
|
||||
|
||||
* [Beginner's Guide to Key Concepts and Resources of ESP-IDF](https://youtu.be/J8zc8mMNKtc?feature=shared)
|
||||
|
||||
* The [esp32.com forum](https://esp32.com/) is a place to ask questions and find community resources.
|
||||
|
||||
* [Check the Issues section on github](https://github.com/espressif/esp-idf/issues) if you find a bug or have a feature request. Please check existing Issues before opening a new one.
|
||||
|
||||
25
README_CN.md
25
README_CN.md
@@ -6,7 +6,7 @@ ESP-IDF 是乐鑫官方推出的物联网开发框架,支持 Windows、Linux
|
||||
|
||||
# ESP-IDF 版本支持期限
|
||||
|
||||

|
||||

|
||||
|
||||
- 请参考 [ESP-IDF 支持政策](SUPPORT_POLICY_CN.md) 以及 [相关文档](https://docs.espressif.com/projects/esp-idf/zh_CN/latest/esp32/versions.html) 了解更多关于 ESP-IDF 版本的信息。
|
||||
- 请参考 [ESP-IDF 版本停止维护 (EOL) 公告](https://www.espressif.com/zh-hans/support/documents/advisories?keys=&field_type_of_advisory_tid%5B%5D=817)。
|
||||
@@ -15,22 +15,19 @@ ESP-IDF 是乐鑫官方推出的物联网开发框架,支持 Windows、Linux
|
||||
|
||||
下表总结了乐鑫芯片在 ESP-IDF 各版本中的支持状态,其中 ![alt text][supported] 代表已支持,![alt text][preview] 代表目前处于预览支持状态。预览支持状态通常有时间限制,而且仅适用于测试版芯片。请确保使用与芯片相匹配的 ESP-IDF 版本。
|
||||
|
||||
|芯片 | v4.3 | v4.4 | v5.0 | v5.1 | v5.2 | |
|
||||
|:----------- | :---------------------:| :---------------------:| :---------------------:| :--------------------: | :--------------------: | :-------------------------------------------------------------- |
|
||||
|ESP32 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S2 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-C3 | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S3 | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_S3) |
|
||||
|ESP32-C2 | | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32-C2) |
|
||||
|ESP32-C6 | | | | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_C6) |
|
||||
|ESP32-H2 | | | | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_H2) |
|
||||
|ESP32-P4 | | | | | ![alt text][preview] | [芯片发布公告](https://www.espressif.com/en/news/ESP32-P4) |
|
||||
|芯片 | v4.2 | v4.3 | v4.4 | v5.0 | v5.1 | |
|
||||
|:----------- |:---------------------:| :---------------------:| :---------------------:| :---------------------:| :--------------------: | :-------------------------------------------------------------- |
|
||||
|ESP32 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S2 |![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-C3 | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | |
|
||||
|ESP32-S3 | | | ![alt text][supported] | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_S3) |
|
||||
|ESP32-C2 | | | | ![alt text][supported] | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32-C2) |
|
||||
|ESP32-C6 | | | | | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_C6) |
|
||||
|ESP32-H2 | | | | | ![alt text][supported] | [芯片发布公告](https://www.espressif.com/zh-hans/news/ESP32_H2) |
|
||||
|
||||
[supported]: https://img.shields.io/badge/-%E6%94%AF%E6%8C%81-green "supported"
|
||||
[preview]: https://img.shields.io/badge/-%E9%A2%84%E8%A7%88-orange "preview"
|
||||
|
||||
每款乐鑫芯片都可能有不同版本。建议参考 [ESP-IDF 版本与乐鑫芯片版本兼容性](https://github.com/espressif/esp-idf/blob/master/COMPATIBILITY_CN.md),了解 ESP-IDF 版本与各芯片版本之间的兼容性。
|
||||
|
||||
对于 2016 年之前发布的乐鑫芯片(包括 ESP8266 和 ESP8285),请参考 [RTOS SDK](https://github.com/espressif/ESP8266_RTOS_SDK)。
|
||||
|
||||
# 使用 ESP-IDF 进行开发
|
||||
@@ -121,8 +118,6 @@ ESP-IDF 中的子模块采用相对路径([详见 .gitmodules 文件](.gitmodu
|
||||
|
||||
* 最新版的文档:https://docs.espressif.com/projects/esp-idf/ ,该文档是由本仓库 [docs 目录](docs) 构建得到。
|
||||
|
||||
* [初学者指南:主要概念和资源](https://www.bilibili.com/video/BV1114y1r7du/)
|
||||
|
||||
* 可以前往 [esp32.com 论坛](https://esp32.com/) 提问,挖掘社区资源。
|
||||
|
||||
* 如果你在使用中发现了错误或者需要新的功能,请先[查看 GitHub Issues](https://github.com/espressif/esp-idf/issues),确保该问题没有重复提交。
|
||||
|
||||
@@ -6,4 +6,4 @@ Please refer to https://docs.espressif.com/projects/esp-idf/en/latest/esp32/vers
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
Please refer to [Espressif Security Incident Response Process](https://www.espressif.com/sites/default/files/Espressif%20Security%20Incident%20Response%20Process%20v1.0_EN.pdf) on the guidelines to report a security vulnerability. Please do **NOT** create a public GitHub issue.
|
||||
If you think you have found a security vulnerability in Espressif solutions (including ESP-IDF), then please send an email to our Bug Bounty team at bugbounty@espressif.com. Please do **NOT** create a public GitHub issue.
|
||||
|
||||
@@ -1,3 +1,4 @@

# Core Components

## Overview
@@ -8,9 +9,16 @@ This document contains details about what the core components are, what they con

The core components are organized into two groups.

The first group (referred to as `G0`) includes `hal`, `arch` (where `arch` is either `riscv` or `xtensa` depending on the chip), `esp_rom`, `esp_common`, and `soc`. This group contains information about and provides low-level access to the underlying hardware. In the case of `esp_common`, it contains hardware-agnostic code and utilities. These components may have dependencies on each other within the group, but outside dependencies should be minimized. The reason for this approach is that these components are fundamental, and many other components may require them. Ideally, the dependency relationship only goes one way, making it easier for this group to be usable in other projects.
The first group (referred to as `G0` from now on) contains `hal`, `xtensa` and `riscv` (referred to as `arch` components from now on), `esp_rom`, `esp_common`, and `soc`. This
group contain information about and low-level access to underlying hardware; or in the case of `esp_common`, hardware-agnostic code and utilities.
These components can depend on each other, but as much as possible have no dependencies outside the group. The reason for this is that, due to the
nature of what these components contain, the likelihood is high that a lot of other components will require these. Ideally, then, the dependency
relationship only goes one way. This makes it easier for these components, as a group, to be usable in another project. One can conceivably implement
a competing SDK to ESP-IDF on top of these components.

The second group (referred to as `G1`) operates at a higher level than the first group. `G1` includes the components `esp_hw_support`, `esp_system`, `newlib`, `spi_flash`, `freertos`, `log`, and `heap`. Like the first group, circular dependencies within this group are allowed, and these components can have dependencies on the first group. G1 components represent essential software mechanisms for building other components.
The second group (referred to as `G1` from now on) sits at a higher level than the first group. This group contains the components `esp_hw_support`, `esp_system`, `newlib`, `spi_flash`,
`freertos`, `log`, and `heap`. Like the first group, circular dependencies within the group are allowed; and being at a higher level, dependency on the first group
is allowed. These components represent software mechanisms essential to building other components.

## Descriptions
|
||||
|
||||
@@ -25,7 +33,7 @@ into routines that achieve a meaningful action or state of the peripheral.
|
||||
|
||||
Example:
|
||||
|
||||
- `spi_flash_ll_set_address` is a low-level function part of the hardware abstraction `spi_flash_hal_read_block`
|
||||
- `spi_flash_ll_set_address` is a low-level function part of the hardware abstraction `spi_flash_hal_read_block`
|
||||
|
||||
#### `arch`
|
||||
|
||||
@@ -34,9 +42,9 @@ This can also contain files provided by the architecture vendor.
|
||||
|
||||
Example:
|
||||
|
||||
- `xt_set_exception_handler`
|
||||
- `rv_utils_intr_enable`
|
||||
- `ERI_PERFMON_MAX`
|
||||
- `xt_set_exception_handler`
|
||||
- `rv_utils_intr_enable`
|
||||
- `ERI_PERFMON_MAX`
|
||||
|
||||
#### `esp_common`
|
||||
|
||||
@@ -44,9 +52,9 @@ Contains hardware-agnostic definitions, constants, macros, utilities, 'pure' and
|
||||
|
||||
Example:
|
||||
|
||||
- `BIT(nr)` and other bit manipulation utilities in the future
|
||||
- `IDF_DEPRECATED(REASON)`
|
||||
- `ESP_IDF_VERSION_MAJOR`
|
||||
- `BIT(nr)` and other bit manipulation utilities in the future
|
||||
- `IDF_DEPRECATED(REASON)`
|
||||
- `ESP_IDF_VERSION_MAJOR`
|
||||
|
||||
#### `soc`
|
||||
|
||||
@@ -54,9 +62,9 @@ Contains description of the underlying hardware: register structure, addresses,
|
||||
|
||||
Example:
|
||||
|
||||
- `DR_REG_DPORT_BASE`
|
||||
- `SOC_MCPWM_SUPPORTED`
|
||||
- `uart_dev_s`
|
||||
- `DR_REG_DPORT_BASE`
|
||||
- `SOC_MCPWM_SUPPORTED`
|
||||
- `uart_dev_s`
|
||||
|
||||
#### `esp_rom`
|
||||
|
||||
@@ -64,8 +72,8 @@ Contains headers, linker scripts, abstraction layer, patches, and other related
|
||||
|
||||
Example:
|
||||
|
||||
- `esp32.rom.eco3.ld`
|
||||
- `rom/aes.h`
|
||||
- `esp32.rom.eco3.ld`
|
||||
- `rom/aes.h`
|
||||
|
||||
### `G1` Components
|
||||
|
||||
@@ -91,20 +99,8 @@ Some functions n the standard library are implemented here, especially those nee
|
||||
|
||||
Example:
|
||||
|
||||
- `malloc` is implemented in terms of the component `heap`'s functions
|
||||
- `gettimeofday` is implemented in terms of system time in `esp_system`
|
||||
|
||||
#### `esp_mm`
|
||||
|
||||
Memory management. Currently, this encompasses:
|
||||
|
||||
- Memory mapping for MMU supported memories
|
||||
- Memory synchronisation via Cache
|
||||
- Utils such as APIs to convert between virtual address and physical address
|
||||
|
||||
#### `esp_psram`
|
||||
|
||||
Contains implementation of PSRAM services
|
||||
- `malloc` is implemented in terms of the component `heap`'s functions
|
||||
- `gettimeofday` is implemented in terms of system time in `esp_system`
|
||||
|
||||
#### `esp_system`
|
||||
|
||||
@@ -112,10 +108,10 @@ Contains implementation of system services and controls system behavior. The imp
|
||||
here may take hardware resources and/or decide on a hardware state needed for support of a system service/feature/mechanism.
|
||||
Currently, this encompasses the following, but not limited to:
|
||||
|
||||
- Startup and initialization
|
||||
- Panic and debug
|
||||
- Reset and reset reason
|
||||
- Task and interrupt watchdogs
|
||||
- Startup and initialization
|
||||
- Panic and debug
|
||||
- Reset and reset reason
|
||||
- Task and interrupt watchdogs
|
||||
|
||||
#### `esp_hw_support`
|
||||
|
||||
@@ -123,19 +119,19 @@ Contains implementations that provide hardware operations, arbitration, or resou
|
||||
is used in the system. Unlike `esp_system`, implementations here do not decide on a hardware state or takes hardware resource, acting
|
||||
merely as facilitator to hardware access. Currently, this encompasses the following, but not limited to:
|
||||
|
||||
- Interrupt allocation
|
||||
- Sleep functions
|
||||
- Memory functions (external SPIRAM, async memory, etc.)
|
||||
- Clock and clock control
|
||||
- Random generation
|
||||
- CPU utilities
|
||||
- MAC settings
|
||||
- Interrupt allocation
|
||||
- Sleep functions
|
||||
- Memory functions (external SPIRAM, async memory, etc.)
|
||||
- Clock and clock control
|
||||
- Random generation
|
||||
- CPU utilities
|
||||
- MAC settings
|
||||
|
||||
### `esp_hw_support` vs `esp_system`
|
||||
|
||||
This section lists some implementations and the reasons for placing them in either `esp_hw_support` or `esp_system`.
|
||||
|
||||
#### `task_wdt.c` (`esp_system`) vs `intr_alloc.c` (`esp_hw_support`)
|
||||
#### `task_wdt.c` (`esp_system`) vs `intr_alloc.c` (`esp_hw_support`)
|
||||
|
||||
The task watchdog fits the definition of taking and configuring hardware resources (wdt, interrupt) for implementation of a system service/mechanism.
|
||||
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
idf_build_get_property(target IDF_TARGET)
|
||||
|
||||
if(${target} STREQUAL "linux")
|
||||
return() # This component is not supported by the POSIX/Linux simulator
|
||||
endif()
|
||||
|
||||
set(srcs
|
||||
"app_trace.c"
|
||||
"app_trace_util.c"
|
||||
@@ -72,12 +66,6 @@ idf_component_register(SRCS "${srcs}"
|
||||
idf_component_get_property(app_trace app_trace COMPONENT_LIB)
|
||||
|
||||
if(CONFIG_APPTRACE_GCOV_ENABLE)
|
||||
if(CMAKE_C_COMPILER_ID MATCHES "Clang")
|
||||
# Coverage info is not supported when clang is used
|
||||
# TODO: LLVM-214
|
||||
message(FATAL_ERROR "Coverage info is not supported when building with Clang!")
|
||||
endif()
|
||||
|
||||
# The original Gcov library from toolchain will be objcopy with symbols redefinitions (see file gcov/io_sym.map).
|
||||
# This needs because ESP has no file-system onboard, and redefined functions solves this problem and transmits
|
||||
# output file to host PC.
|
||||
|
||||
@@ -389,11 +389,4 @@ menu "Application Level Tracing"
|
||||
help
|
||||
Enables support for GCOV data transfer to host.
|
||||
|
||||
config APPTRACE_GCOV_DUMP_TASK_STACK_SIZE
|
||||
int "Gcov dump task stack size"
|
||||
depends on APPTRACE_GCOV_ENABLE
|
||||
default 2048
|
||||
help
|
||||
Configures stack size of Gcov dump task
|
||||
|
||||
endmenu
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -76,8 +76,7 @@ gcov_exit:
|
||||
void gcov_create_task(void *arg)
|
||||
{
|
||||
ESP_EARLY_LOGV(TAG, "%s", __FUNCTION__);
|
||||
xTaskCreatePinnedToCore(&gcov_dump_task, "gcov_dump_task", CONFIG_APPTRACE_GCOV_DUMP_TASK_STACK_SIZE,
|
||||
(void *)&s_gcov_task_running, configMAX_PRIORITIES - 1, NULL, 0);
|
||||
xTaskCreatePinnedToCore(&gcov_dump_task, "gcov_dump_task", 2048, (void *)&s_gcov_task_running, configMAX_PRIORITIES - 1, NULL, 0);
|
||||
}
|
||||
|
||||
void gcov_create_task_tick_hook(void)
|
||||
@@ -173,13 +172,6 @@ long gcov_rtio_ftell(void *stream)
|
||||
return ret;
|
||||
}
|
||||
|
||||
int gcov_rtio_feof(void *stream)
|
||||
{
|
||||
int ret = esp_apptrace_feof(ESP_APPTRACE_DEST_TRAX, stream);
|
||||
ESP_EARLY_LOGV(TAG, "%s(%p) = %d", __FUNCTION__, stream, ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
void gcov_rtio_setbuf(void *arg1 __attribute__ ((unused)), void *arg2 __attribute__ ((unused)))
|
||||
{
|
||||
return;
|
||||
|
||||
@@ -5,4 +5,3 @@ fread gcov_rtio_fread
|
||||
fseek gcov_rtio_fseek
|
||||
ftell gcov_rtio_ftell
|
||||
setbuf gcov_rtio_setbuf
|
||||
feof gcov_rtio_feof
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -29,7 +29,6 @@ const static char *TAG = "esp_host_file_io";
|
||||
#define ESP_APPTRACE_FILE_CMD_FSEEK 0x4
|
||||
#define ESP_APPTRACE_FILE_CMD_FTELL 0x5
|
||||
#define ESP_APPTRACE_FILE_CMD_STOP 0x6 // indicates that there is no files to transfer
|
||||
#define ESP_APPTRACE_FILE_CMD_FEOF 0x7
|
||||
|
||||
/** File operation header */
|
||||
typedef struct {
|
||||
@@ -69,11 +68,6 @@ typedef struct {
|
||||
void * file;
|
||||
} esp_apptrace_fseek_args_t;
|
||||
|
||||
/** Helper structure for feof */
|
||||
typedef struct {
|
||||
void *file;
|
||||
} esp_apptrace_feof_args_t;
|
||||
|
||||
/** Helper structure for ftell */
|
||||
typedef struct {
|
||||
void *file;
|
||||
@@ -234,11 +228,8 @@ size_t esp_apptrace_fwrite(esp_apptrace_dest_t dest, const void *ptr, size_t siz
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return 0;
|
||||
}
|
||||
/* OpenOCD writes it like that:
|
||||
* fwrite(buf, size, 1, file);
|
||||
* So, if 1 was returned that means fwrite succeed
|
||||
*/
|
||||
return resp == 1 ? nmemb : 0;
|
||||
|
||||
return resp/size; // return the number of items written
|
||||
}
|
||||
|
||||
static void esp_apptrace_fread_args_prepare(uint8_t *buf, void *priv)
|
||||
@@ -284,10 +275,6 @@ size_t esp_apptrace_fread(esp_apptrace_dest_t dest, void *ptr, size_t size, size
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read file data (%d)!", ret);
|
||||
return 0;
|
||||
}
|
||||
/* OpenOCD reads it like that:
|
||||
* fread(buf, 1 ,size, file);
|
||||
* So, total read bytes count returns
|
||||
*/
|
||||
return resp/size; // return the number of items read
|
||||
}
|
||||
|
||||
@@ -367,34 +354,4 @@ int esp_apptrace_fstop(esp_apptrace_dest_t dest)
|
||||
return ret;
|
||||
}
|
||||
|
||||
static void esp_apptrace_feof_args_prepare(uint8_t *buf, void *priv)
|
||||
{
|
||||
esp_apptrace_feof_args_t *args = priv;
|
||||
|
||||
memcpy(buf, &args->file, sizeof(args->file));
|
||||
}
|
||||
|
||||
int esp_apptrace_feof(esp_apptrace_dest_t dest, void *stream)
|
||||
{
|
||||
esp_apptrace_feof_args_t cmd_args;
|
||||
|
||||
cmd_args.file = stream;
|
||||
esp_err_t ret = esp_apptrace_file_cmd_send(dest, ESP_APPTRACE_FILE_CMD_FEOF, esp_apptrace_feof_args_prepare,
|
||||
&cmd_args, sizeof(cmd_args));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to send file cmd (%d)!", ret);
|
||||
return EOF;
|
||||
}
|
||||
|
||||
// now read the answer
|
||||
int resp;
|
||||
ret = esp_apptrace_file_rsp_recv(dest, (uint8_t *)&resp, sizeof(resp));
|
||||
if (ret != ESP_OK) {
|
||||
ESP_EARLY_LOGE(TAG, "Failed to read response (%d)!", ret);
|
||||
return EOF;
|
||||
}
|
||||
|
||||
return resp;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2017-2023 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2017-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -255,17 +255,6 @@ int esp_apptrace_ftell(esp_apptrace_dest_t dest, void *stream);
|
||||
*/
|
||||
int esp_apptrace_fstop(esp_apptrace_dest_t dest);
|
||||
|
||||
/**
|
||||
* @brief Test end-of-file indicator on a stream.
|
||||
* This function has the same semantic as 'feof' except for the first argument.
|
||||
*
|
||||
* @param dest Indicates HW interface to use.
|
||||
* @param stream File handle returned by esp_apptrace_fopen.
|
||||
*
|
||||
* @return Non-Zero if end-of-file indicator is set for stream. See feof for details.
|
||||
*/
|
||||
int esp_apptrace_feof(esp_apptrace_dest_t dest, void *stream);
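Not part of the diff — a hypothetical sketch of how the host-file API around `esp_apptrace_feof` is typically used, assuming the `esp_apptrace_fopen`/`esp_apptrace_fread`/`esp_apptrace_fclose` declarations from `esp_app_trace.h`; the path and buffer size are illustrative only:

```c
#include <stdint.h>
#include "esp_app_trace.h"
#include "esp_log.h"

static const char *TAG = "trace_example";

// Read a file from the host over JTAG (TRAX destination) until end-of-file.
static void read_host_file(void)
{
    void *f = esp_apptrace_fopen(ESP_APPTRACE_DEST_TRAX, "/tmp/input.bin", "rb");
    if (f == NULL) {
        ESP_LOGE(TAG, "Failed to open host file");
        return;
    }
    uint8_t buf[256];
    while (!esp_apptrace_feof(ESP_APPTRACE_DEST_TRAX, f)) {
        size_t rd = esp_apptrace_fread(ESP_APPTRACE_DEST_TRAX, buf, 1, sizeof(buf), f);
        if (rd == 0) {
            break;  // read error or nothing left to read
        }
        // ... process rd bytes ...
    }
    esp_apptrace_fclose(ESP_APPTRACE_DEST_TRAX, f);
}
```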
|
||||
|
||||
/**
|
||||
* @brief Triggers gcov info dump.
|
||||
* This function waits for the host to connect to target before dumping data.
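A hypothetical usage sketch for the dump trigger described above (assuming the `esp_gcov_dump()` declaration in `esp_app_trace.h`, `CONFIG_APPTRACE_GCOV_ENABLE` set, and an OpenOCD session on the host):

```c
#include "esp_app_trace.h"

// Dump coverage data at a well-defined point in the application,
// e.g. once a test scenario has finished. Blocks until the host connects.
void test_scenario_done(void)
{
    esp_gcov_dump();
}
```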
|
||||
|
||||
@@ -137,7 +137,7 @@ gptimer_handle_t s_sv_gptimer;
|
||||
#endif
|
||||
|
||||
#elif CONFIG_FREERTOS_SYSTICK_USES_SYSTIMER
|
||||
#define SYSTICK_INTR_ID (ETS_SYSTIMER_TARGET0_INTR_SOURCE)
|
||||
#define SYSTICK_INTR_ID (ETS_SYSTIMER_TARGET0_EDGE_INTR_SOURCE)
|
||||
#endif // CONFIG_FREERTOS_TICK_SUPPORT_CORETIMER
|
||||
|
||||
// SystemView is single core specific: it implies that SEGGER_SYSVIEW_LOCK()
|
||||
|
||||
@@ -72,13 +72,13 @@ void SEGGER_RTT_ESP_FlushNoLock(unsigned long min_sz, unsigned long tmo)
|
||||
if (s_events_buf_filled > 0) {
|
||||
res = esp_apptrace_write(ESP_APPTRACE_DEST_SYSVIEW, s_events_buf, s_events_buf_filled, tmo);
|
||||
if (res != ESP_OK) {
|
||||
ESP_LOGE(TAG, "Failed to flush buffered events (%d)!", res);
|
||||
ESP_LOGE(TAG, "Failed to flush buffered events (%d)!\n", res);
|
||||
}
|
||||
}
|
||||
// flush even if we failed to write buffered events, because no new events will be sent after STOP
|
||||
res = esp_apptrace_flush_nolock(ESP_APPTRACE_DEST_SYSVIEW, min_sz, tmo);
|
||||
if (res != ESP_OK) {
|
||||
ESP_LOGE(TAG, "Failed to flush apptrace data (%d)!", res);
|
||||
ESP_LOGE(TAG, "Failed to flush apptrace data (%d)!\n", res);
|
||||
}
|
||||
s_events_buf_filled = 0;
|
||||
}
|
||||
@@ -167,8 +167,8 @@ unsigned SEGGER_RTT_WriteSkipNoLock(unsigned BufferIndex, const void* pBuffer, u
|
||||
(event_id == SYSVIEW_EVTID_TASK_STOP_EXEC) ||
|
||||
(event_id == SYSVIEW_EVTID_TASK_START_READY) ||
|
||||
(event_id == SYSVIEW_EVTID_TASK_STOP_READY) ||
|
||||
(event_id == SYSVIEW_EVTID_MARK_START) ||
|
||||
(event_id == SYSVIEW_EVTID_MARK_STOP) ||
|
||||
(event_id == SYSVIEW_EVTID_USER_START) ||
|
||||
(event_id == SYSVIEW_EVTID_USER_STOP) ||
|
||||
(event_id == SYSVIEW_EVTID_TIMER_ENTER) ||
|
||||
(event_id == SYSVIEW_EVTID_TIMER_EXIT) ||
|
||||
(event_id == SYSVIEW_EVTID_STACK_INFO) ||
|
||||
|
||||
@@ -18,7 +18,7 @@ int esp_sysview_vprintf(const char * format, va_list args)
|
||||
portENTER_CRITICAL(&s_log_mutex);
|
||||
size_t len = vsnprintf(log_buffer, sizeof(log_buffer), format, args);
|
||||
if (len > sizeof(log_buffer) - 1) {
|
||||
log_buffer[sizeof(log_buffer) - 1] = 0;
|
||||
log_buffer[sizeof(log_buffer - 1)] = 0;
|
||||
}
|
||||
SEGGER_SYSVIEW_Print(log_buffer);
|
||||
portEXIT_CRITICAL(&s_log_mutex);
|
||||
|
||||
4
components/app_trace/test/CMakeLists.txt
Normal file
4
components/app_trace/test/CMakeLists.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
idf_component_register(SRC_DIRS "."
|
||||
PRIV_INCLUDE_DIRS "."
|
||||
PRIV_REQUIRES cmock driver)
|
||||
target_compile_options(${COMPONENT_LIB} PRIVATE "-Wno-format")
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -16,7 +16,7 @@
|
||||
#include "freertos/FreeRTOS.h"
|
||||
#include "freertos/semphr.h"
|
||||
#include "freertos/task.h"
|
||||
|
||||
#if CONFIG_APPTRACE_ENABLE == 1
|
||||
#include "esp_app_trace.h"
|
||||
#include "esp_app_trace_util.h"
|
||||
|
||||
@@ -423,6 +423,7 @@ TEST_CASE("App trace test (1 task + 1 crashed timer ISR @ 1 core)", "[trace][ign
|
||||
esp_apptrace_test(&test_cfg);
|
||||
}
|
||||
|
||||
|
||||
TEST_CASE("App trace test (1 crashed task)", "[trace][ignore]")
|
||||
{
|
||||
esp_apptrace_test_task_arg_t s_test_tasks[1];
|
||||
@@ -716,7 +717,6 @@ typedef struct {
|
||||
static bool esp_sysview_test_timer_isr(gptimer_handle_t timer, const gptimer_alarm_event_data_t *edata, void *user_ctx)
|
||||
{
|
||||
esp_sysviewtrace_timer_arg_t *tim_arg = (esp_sysviewtrace_timer_arg_t *)user_ctx;
|
||||
(void) tim_arg;
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -724,7 +724,7 @@ static void esp_sysviewtrace_test_task(void *p)
|
||||
{
|
||||
esp_sysviewtrace_task_arg_t *arg = (esp_sysviewtrace_task_arg_t *) p;
|
||||
volatile uint32_t tmp = 0;
|
||||
printf("%p: run sysview task\n", xTaskGetCurrentTaskHandle());
|
||||
printf("%x: run sysview task\n", (uint32_t)xTaskGetCurrentTaskHandle());
|
||||
|
||||
if (arg->timer) {
|
||||
gptimer_alarm_config_t alarm_config = {
|
||||
@@ -744,7 +744,7 @@ static void esp_sysviewtrace_test_task(void *p)
|
||||
int i = 0;
|
||||
while (1) {
|
||||
static uint32_t count;
|
||||
printf("%" PRIu32, arg->id);
|
||||
printf("%d", arg->id);
|
||||
if ((++count % 80) == 0) {
|
||||
printf("\n");
|
||||
}
|
||||
@@ -890,22 +890,22 @@ TEST_CASE("SysView trace test 2", "[trace][ignore]")
|
||||
TEST_ESP_OK(gptimer_new_timer(&timer_config, &tim_arg2.gptimer));
|
||||
|
||||
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svtrace0", 2048, &arg1, 3, &thnd, 0);
|
||||
printf("Created task %p\n", thnd);
|
||||
printf("Created task %x\n", (uint32_t)thnd);
|
||||
#if CONFIG_FREERTOS_UNICORE == 0
|
||||
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svtrace1", 2048, &arg2, 4, &thnd, 1);
|
||||
#else
|
||||
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svtrace1", 2048, &arg2, 4, &thnd, 0);
|
||||
#endif
|
||||
printf("Created task %p\n", thnd);
|
||||
printf("Created task %x\n", (uint32_t)thnd);
|
||||
|
||||
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svsync0", 2048, &arg3, 3, &thnd, 0);
|
||||
printf("Created task %p\n", thnd);
|
||||
printf("Created task %x\n", (uint32_t)thnd);
|
||||
#if CONFIG_FREERTOS_UNICORE == 0
|
||||
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svsync1", 2048, &arg4, 5, &thnd, 1);
|
||||
#else
|
||||
xTaskCreatePinnedToCore(esp_sysviewtrace_test_task, "svsync1", 2048, &arg4, 5, &thnd, 0);
|
||||
#endif
|
||||
printf("Created task %p\n", thnd);
|
||||
printf("Created task %x\n", (uint32_t)thnd);
|
||||
|
||||
xSemaphoreTake(arg1.done, portMAX_DELAY);
|
||||
vSemaphoreDelete(arg1.done);
|
||||
@@ -924,3 +924,4 @@ TEST_CASE("SysView trace test 2", "[trace][ignore]")
|
||||
TEST_ESP_OK(gptimer_del_timer(tim_arg2.gptimer));
|
||||
}
|
||||
#endif // #if CONFIG_APPTRACE_SV_ENABLE == 0
|
||||
#endif // #if CONFIG_APPTRACE_ENABLE == 1
|
||||
@@ -1,9 +0,0 @@
|
||||
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps
|
||||
|
||||
components/app_trace/test_apps:
|
||||
depends_components:
|
||||
- app_trace
|
||||
- esp_timer
|
||||
- soc
|
||||
- driver
|
||||
- esp_hw_support
|
||||
@@ -1,9 +0,0 @@
|
||||
cmake_minimum_required(VERSION 3.16)
|
||||
|
||||
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
|
||||
set(COMPONENTS main)
|
||||
list(PREPEND SDKCONFIG_DEFAULTS
|
||||
"$ENV{IDF_PATH}/tools/test_apps/configs/sdkconfig.debug_helpers"
|
||||
"sdkconfig.defaults")
|
||||
|
||||
project(app_trace_test)
|
||||
@@ -1,16 +0,0 @@
|
||||
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-C6 | ESP32-H2 | ESP32-P4 | ESP32-S2 | ESP32-S3 |
|
||||
| ----------------- | ----- | -------- | -------- | -------- | -------- | -------- | -------- | -------- |
|
||||
|
||||
# app_trace test
|
||||
|
||||
To build and run this test app for app_trace related tests:
|
||||
```bash
|
||||
IDF_TARGET=esp32 idf.py @app_trace build flash monitor
|
||||
```
|
||||
|
||||
To build and run this test app for SystemView related tests:
|
||||
```bash
|
||||
IDF_TARGET=esp32 idf.py @sysview build flash monitor
|
||||
```
|
||||
|
||||
`@app_trace` and `@sysview` arguments apply additional `idf.py` options, from [app_trace](app_trace) and [sysview](sysview) files.
|
||||
@@ -1 +0,0 @@
|
||||
-DSDKCONFIG_DEFAULTS="sdkconfig.defaults;sdkconfig.ci.app_trace" -B build/app_trace -DSDKCONFIG=build/app_trace/sdkconfig
|
||||
@@ -1,4 +0,0 @@
|
||||
idf_component_register(SRCS "test_app_trace_main.c" "test_trace.c"
|
||||
INCLUDE_DIRS "."
|
||||
PRIV_REQUIRES app_trace unity driver
|
||||
WHOLE_ARCHIVE)
|
||||
@@ -1,48 +0,0 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
#include "unity.h"
|
||||
#include "unity_test_runner.h"
|
||||
#include "esp_heap_caps.h"
|
||||
|
||||
#define TEST_MEMORY_LEAK_THRESHOLD_DEFAULT 0
|
||||
static int leak_threshold = TEST_MEMORY_LEAK_THRESHOLD_DEFAULT;
|
||||
void set_leak_threshold(int threshold)
|
||||
{
|
||||
leak_threshold = threshold;
|
||||
}
|
||||
|
||||
static size_t before_free_8bit;
|
||||
static size_t before_free_32bit;
|
||||
|
||||
static void check_leak(size_t before_free, size_t after_free, const char *type)
|
||||
{
|
||||
ssize_t delta = after_free - before_free;
|
||||
printf("MALLOC_CAP_%s: Before %u bytes free, After %u bytes free (delta %d)\n", type, before_free, after_free, delta);
|
||||
TEST_ASSERT_MESSAGE(delta >= leak_threshold, "memory leak");
|
||||
}
|
||||
|
||||
void setUp(void)
|
||||
{
|
||||
before_free_8bit = heap_caps_get_free_size(MALLOC_CAP_8BIT);
|
||||
before_free_32bit = heap_caps_get_free_size(MALLOC_CAP_32BIT);
|
||||
}
|
||||
|
||||
void tearDown(void)
|
||||
{
|
||||
size_t after_free_8bit = heap_caps_get_free_size(MALLOC_CAP_8BIT);
|
||||
size_t after_free_32bit = heap_caps_get_free_size(MALLOC_CAP_32BIT);
|
||||
check_leak(before_free_8bit, after_free_8bit, "8BIT");
|
||||
check_leak(before_free_32bit, after_free_32bit, "32BIT");
|
||||
|
||||
leak_threshold = TEST_MEMORY_LEAK_THRESHOLD_DEFAULT;
|
||||
}
|
||||
|
||||
void app_main(void)
|
||||
{
|
||||
printf("Running app_trace component tests\n");
|
||||
unity_run_menu();
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
# app_trace is already enabled by sdkconfig.defaults, so no options are needed here
|
||||
@@ -1 +0,0 @@
|
||||
CONFIG_APPTRACE_SV_ENABLE=y
|
||||
@@ -1,2 +0,0 @@
|
||||
CONFIG_ESP_TASK_WDT_CHECK_IDLE_TASK_CPU0=n
|
||||
CONFIG_APPTRACE_DEST_JTAG=y
|
||||
@@ -1 +0,0 @@
|
||||
-DSDKCONFIG_DEFAULTS="sdkconfig.defaults;sdkconfig.ci.sysview" -B build/sysview -DSDKCONFIG=build/sysview/sdkconfig
|
||||
7
components/app_update/.build-test-rules.yml
Normal file
7
components/app_update/.build-test-rules.yml
Normal file
@@ -0,0 +1,7 @@
|
||||
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps
|
||||
|
||||
components/app_update/test_apps:
|
||||
disable:
|
||||
- if: IDF_TARGET == "esp32c6" or IDF_TARGET == "esp32h2"
|
||||
temporary: true
|
||||
reason: target esp32c6, esp32h2 is not supported yet
|
||||
@@ -1,12 +1,6 @@
|
||||
idf_build_get_property(target IDF_TARGET)
|
||||
|
||||
if(${target} STREQUAL "linux")
|
||||
return() # This component is not supported by the POSIX/Linux simulator
|
||||
endif()
|
||||
|
||||
idf_component_register(SRCS "esp_ota_ops.c" "esp_ota_app_desc.c"
|
||||
INCLUDE_DIRS "include"
|
||||
REQUIRES partition_table bootloader_support esp_app_format esp_bootloader_format esp_partition
|
||||
REQUIRES partition_table bootloader_support esp_app_format esp_partition
|
||||
PRIV_REQUIRES esptool_py efuse spi_flash)
|
||||
|
||||
if(NOT BOOTLOADER_BUILD)
|
||||
|
||||
@@ -15,7 +15,7 @@ const esp_app_desc_t *esp_ota_get_app_description(void)
|
||||
return esp_app_get_description();
|
||||
}
|
||||
|
||||
int esp_ota_get_app_elf_sha256(char* dst, size_t size)
|
||||
int IRAM_ATTR esp_ota_get_app_elf_sha256(char* dst, size_t size)
|
||||
{
|
||||
return esp_app_get_elf_sha256(dst, size);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2015-2023 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -28,8 +28,6 @@
|
||||
#include "esp_system.h"
|
||||
#include "esp_efuse.h"
|
||||
#include "esp_attr.h"
|
||||
#include "esp_bootloader_desc.h"
|
||||
#include "esp_flash.h"
|
||||
|
||||
#if CONFIG_IDF_TARGET_ESP32
|
||||
#include "esp32/rom/secure_boot.h"
|
||||
@@ -45,8 +43,6 @@
|
||||
#include "esp32c6/rom/secure_boot.h"
|
||||
#elif CONFIG_IDF_TARGET_ESP32H2
|
||||
#include "esp32h2/rom/secure_boot.h"
|
||||
#elif CONFIG_IDF_TARGET_ESP32P4
|
||||
#include "esp32p4/rom/secure_boot.h"
|
||||
#endif
|
||||
|
||||
#define SUB_TYPE_ID(i) (i & 0x0F)
|
||||
@@ -199,18 +195,13 @@ esp_err_t esp_ota_write(esp_ota_handle_t handle, const void *data, size_t size)
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
|
||||
if (size == 0) {
|
||||
ESP_LOGD(TAG, "write data size is 0");
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
// find ota handle in linked list
|
||||
for (it = LIST_FIRST(&s_ota_ops_entries_head); it != NULL; it = LIST_NEXT(it, entries)) {
|
||||
if (it->handle == handle) {
|
||||
if (it->need_erase) {
|
||||
// must erase the partition before writing to it
|
||||
uint32_t first_sector = it->wrote_size / SPI_FLASH_SEC_SIZE; // first affected sector
|
||||
uint32_t last_sector = (it->wrote_size + size - 1) / SPI_FLASH_SEC_SIZE; // last affected sector
|
||||
uint32_t first_sector = it->wrote_size / SPI_FLASH_SEC_SIZE;
|
||||
uint32_t last_sector = (it->wrote_size + size) / SPI_FLASH_SEC_SIZE;
|
||||
|
||||
ret = ESP_OK;
|
||||
if ((it->wrote_size % SPI_FLASH_SEC_SIZE) == 0) {
|
||||
@@ -637,32 +628,6 @@ const esp_partition_t* esp_ota_get_next_update_partition(const esp_partition_t *
|
||||
|
||||
}
|
||||
|
||||
esp_err_t esp_ota_get_bootloader_description(const esp_partition_t *bootloader_partition, esp_bootloader_desc_t *desc)
|
||||
{
|
||||
if (desc == NULL) {
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
esp_partition_t partition = { 0 };
|
||||
if (bootloader_partition == NULL) {
|
||||
partition.flash_chip = esp_flash_default_chip;
|
||||
partition.encrypted = esp_flash_encryption_enabled();
|
||||
partition.address = CONFIG_BOOTLOADER_OFFSET_IN_FLASH;
|
||||
partition.size = CONFIG_PARTITION_TABLE_OFFSET - CONFIG_BOOTLOADER_OFFSET_IN_FLASH;
|
||||
} else {
|
||||
memcpy(&partition, bootloader_partition, sizeof(partition));
|
||||
}
|
||||
esp_err_t err = esp_partition_read(&partition, sizeof(esp_image_header_t) + sizeof(esp_image_segment_header_t), desc, sizeof(esp_bootloader_desc_t));
|
||||
if (err != ESP_OK) {
|
||||
return err;
|
||||
}
|
||||
|
||||
if (desc->magic_byte != ESP_BOOTLOADER_DESC_MAGIC_BYTE) {
|
||||
return ESP_ERR_NOT_FOUND;
|
||||
}
|
||||
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
esp_err_t esp_ota_get_partition_description(const esp_partition_t *partition, esp_app_desc_t *app_desc)
|
||||
{
|
||||
if (partition == NULL || app_desc == NULL) {
|
||||
@@ -931,27 +896,9 @@ esp_err_t esp_ota_erase_last_boot_app_partition(void)
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
#if SOC_SUPPORT_SECURE_BOOT_REVOKE_KEY && CONFIG_SECURE_BOOT_V2_ENABLED
|
||||
#if SOC_EFUSE_SECURE_BOOT_KEY_DIGESTS > 1 && CONFIG_SECURE_BOOT_V2_ENABLED
|
||||
esp_err_t esp_ota_revoke_secure_boot_public_key(esp_ota_secure_boot_public_key_index_t index) {
|
||||
|
||||
// Validates the image at "app_pos" with the secure boot digests other than "revoked_key_index"
|
||||
static bool validate_img(esp_ota_secure_boot_public_key_index_t revoked_key_index, esp_partition_pos_t *app_pos)
|
||||
{
|
||||
bool verified = false;
|
||||
for (int i = 0; i < SOC_EFUSE_SECURE_BOOT_KEY_DIGESTS; i++) {
|
||||
if (i == revoked_key_index) {
|
||||
continue;
|
||||
}
|
||||
if (esp_secure_boot_verify_with_efuse_digest_index(i, app_pos) == ESP_OK) {
|
||||
verified = true;
|
||||
ESP_LOGI(TAG, "Application successfully verified with public key digest %d", i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return verified;
|
||||
}
|
||||
|
||||
esp_err_t esp_ota_revoke_secure_boot_public_key(esp_ota_secure_boot_public_key_index_t index)
|
||||
{
|
||||
if (!esp_secure_boot_enabled()) {
|
||||
ESP_LOGE(TAG, "Secure boot v2 has not been enabled.");
|
||||
return ESP_FAIL;
|
||||
@@ -964,21 +911,14 @@ esp_err_t esp_ota_revoke_secure_boot_public_key(esp_ota_secure_boot_public_key_i
|
||||
return ESP_ERR_INVALID_ARG;
|
||||
}
|
||||
|
||||
const esp_partition_t *running_app_part = esp_ota_get_running_partition();
|
||||
esp_err_t ret = ESP_FAIL;
|
||||
#ifdef CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE
|
||||
esp_ota_img_states_t running_app_state;
|
||||
ret = esp_ota_get_state_partition(running_app_part, &running_app_state);
|
||||
if (ret != ESP_OK) {
|
||||
ESP_LOGE(TAG, "esp_ota_get_state_partition returned: %s", esp_err_to_name(ret));
|
||||
esp_image_sig_public_key_digests_t app_digests = { 0 };
|
||||
esp_err_t err = esp_secure_boot_get_signature_blocks_for_running_app(true, &app_digests);
|
||||
if (err != ESP_OK || app_digests.num_digests == 0) {
|
||||
ESP_LOGE(TAG, "This app is not signed, but check signature on update is enabled in config. It won't be possible to verify any update.");
|
||||
return ESP_FAIL;
|
||||
}
|
||||
if (running_app_state != ESP_OTA_IMG_VALID) {
|
||||
ESP_LOGE(TAG, "The current running application is not marked as a valid image. Aborting..");
|
||||
return ESP_FAIL;
|
||||
}
|
||||
#endif
|
||||
|
||||
esp_err_t ret;
|
||||
esp_secure_boot_key_digests_t trusted_keys;
|
||||
ret = esp_secure_boot_read_key_digests(&trusted_keys);
|
||||
if (ret != ESP_OK) {
|
||||
@@ -987,38 +927,55 @@ esp_err_t esp_ota_revoke_secure_boot_public_key(esp_ota_secure_boot_public_key_i
|
||||
}
|
||||
|
||||
if (trusted_keys.key_digests[index] == NULL) {
|
||||
ESP_LOGI(TAG, "Given public key digest(%d) is already revoked.", index);
|
||||
ESP_LOGI(TAG, "Trusted Key block(%d) already revoked.", index);
|
||||
return ESP_OK;
|
||||
}
|
||||
|
||||
/* Check if the application can be verified with a key other than the one being revoked */
|
||||
esp_partition_pos_t running_app_pos = {
|
||||
.offset = running_app_part->address,
|
||||
.size = running_app_part->size,
|
||||
};
|
||||
esp_image_sig_public_key_digests_t trusted_digests = { 0 };
|
||||
for (unsigned i = 0; i < SECURE_BOOT_NUM_BLOCKS; i++) {
|
||||
if (i == index) {
|
||||
continue; // omitting - to find if there is a valid key after revoking this digest
|
||||
}
|
||||
|
||||
if (!validate_img(index, &running_app_pos)) {
|
||||
ESP_LOGE(TAG, "Application cannot be verified with any key other than the one being revoked");
|
||||
if (trusted_keys.key_digests[i] != NULL) {
|
||||
bool all_zeroes = true;
|
||||
for (unsigned j = 0; j < ESP_SECURE_BOOT_DIGEST_LEN; j++) {
|
||||
all_zeroes = all_zeroes && (*(uint8_t *)(trusted_keys.key_digests[i] + j) == 0);
|
||||
}
|
||||
if (!all_zeroes) {
|
||||
memcpy(trusted_digests.key_digests[trusted_digests.num_digests++], (uint8_t *)trusted_keys.key_digests[i], ESP_SECURE_BOOT_DIGEST_LEN);
|
||||
} else {
|
||||
ESP_LOGD(TAG, "Empty trusted key block (%d).", i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool match = false;
|
||||
for (unsigned i = 0; i < trusted_digests.num_digests; i++) {
|
||||
if (match == true) {
|
||||
break;
|
||||
}
|
||||
|
||||
for (unsigned j = 0; j < app_digests.num_digests; j++) {
|
||||
if (memcmp(trusted_digests.key_digests[i], app_digests.key_digests[j], ESP_SECURE_BOOT_DIGEST_LEN) == 0) {
|
||||
ESP_LOGI(TAG, "App key block(%d) matches Trusted key block(%d)[%d -> Next active trusted key block].", j, i, i);
|
||||
esp_err_t err = esp_efuse_set_digest_revoke(index);
|
||||
if (err != ESP_OK) {
|
||||
ESP_LOGE(TAG, "Failed to revoke digest (0x%x).", err);
|
||||
return ESP_FAIL;
|
||||
}
|
||||
ESP_LOGI(TAG, "Revoked signature block %d.", index);
|
||||
match = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (match == false) {
|
||||
ESP_LOGE(TAG, "Running app doesn't have another valid secure boot key. Cannot revoke current key(%d).", index);
|
||||
return ESP_FAIL;
|
||||
}
|
||||
|
||||
/* Check if the bootloader can be verified with any key other than the one being revoked */
|
||||
esp_partition_pos_t bootloader_pos = {
|
||||
.offset = ESP_BOOTLOADER_OFFSET,
|
||||
.size = (ESP_PARTITION_TABLE_OFFSET - ESP_BOOTLOADER_OFFSET),
|
||||
};
|
||||
|
||||
if (!validate_img(index, &bootloader_pos)) {
|
||||
ESP_LOGE(TAG, "Bootloader cannot be verified with any key other than the one being revoked");
|
||||
return ESP_FAIL;
|
||||
}
|
||||
|
||||
esp_err_t err = esp_efuse_set_digest_revoke(index);
|
||||
if (err != ESP_OK) {
|
||||
ESP_LOGE(TAG, "Failed to revoke digest (0x%x).", err);
|
||||
return ESP_FAIL;
|
||||
}
|
||||
ESP_LOGI(TAG, "Revoked signature block %d.", index);
|
||||
return ESP_OK;
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/*
|
||||
* SPDX-FileCopyrightText: 2015-2023 Espressif Systems (Shanghai) CO LTD
|
||||
* SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
|
||||
*
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
@@ -13,7 +13,6 @@
|
||||
#include "esp_err.h"
|
||||
#include "esp_partition.h"
|
||||
#include "esp_app_desc.h"
|
||||
#include "esp_bootloader_desc.h"
|
||||
#include "esp_flash_partitions.h"
|
||||
#include "soc/soc_caps.h"
|
||||
|
||||
@@ -111,7 +110,7 @@ esp_err_t esp_ota_begin(const esp_partition_t* partition, size_t image_size, esp
* @param size Size of data buffer in bytes.
*
* @return
* - ESP_OK: Data was written to flash successfully, or size = 0
* - ESP_OK: Data was written to flash successfully.
* - ESP_ERR_INVALID_ARG: handle is invalid.
* - ESP_ERR_OTA_VALIDATE_FAILED: First byte of image contains invalid app image magic byte.
* - ESP_ERR_FLASH_OP_TIMEOUT or ESP_ERR_FLASH_OP_FAIL: Flash write failed.
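Editor's note, not part of this diff: the return codes above belong to the usual esp_ota_begin() / esp_ota_write() / esp_ota_end() sequence. A minimal sketch, assuming a hypothetical read_chunk() helper as the application's data source:

    #include <stddef.h>
    #include <stdint.h>
    #include "esp_err.h"
    #include "esp_ota_ops.h"

    extern int read_chunk(uint8_t *buf, size_t len); // hypothetical data source (e.g. an HTTP stream)

    static void ota_write_example(void)
    {
        // Pick the next inactive OTA slot; OTA_SIZE_UNKNOWN erases the whole partition up front.
        const esp_partition_t *update = esp_ota_get_next_update_partition(NULL);
        esp_ota_handle_t handle = 0;
        ESP_ERROR_CHECK(esp_ota_begin(update, OTA_SIZE_UNKNOWN, &handle));

        uint8_t buf[1024];
        int len;
        while ((len = read_chunk(buf, sizeof(buf))) > 0) {
            // esp_ota_write() returns the codes documented above.
            ESP_ERROR_CHECK(esp_ota_write(handle, buf, len));
        }

        ESP_ERROR_CHECK(esp_ota_end(handle));
        ESP_ERROR_CHECK(esp_ota_set_boot_partition(update));
    }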
@@ -252,23 +251,6 @@ const esp_partition_t* esp_ota_get_next_update_partition(const esp_partition_t *
*/
esp_err_t esp_ota_get_partition_description(const esp_partition_t *partition, esp_app_desc_t *app_desc);
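A minimal usage sketch (illustrative, not part of this diff): reading the descriptor of the currently running app; error handling is shortened.

    #include "esp_log.h"
    #include "esp_ota_ops.h"

    static const char *TAG = "app_info";

    static void print_running_app_description(void)
    {
        const esp_partition_t *running = esp_ota_get_running_partition();
        esp_app_desc_t desc;
        if (esp_ota_get_partition_description(running, &desc) == ESP_OK) {
            // version and project_name are fixed-size strings in esp_app_desc_t
            ESP_LOGI(TAG, "Running firmware %s (project %s)", desc.version, desc.project_name);
        }
    }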

/**
* @brief Returns the description structure of the bootloader.
*
* @param[in] bootloader_partition Pointer to bootloader partition.
* If NULL, then the current bootloader is used (the default location).
* offset = CONFIG_BOOTLOADER_OFFSET_IN_FLASH,
* size = CONFIG_PARTITION_TABLE_OFFSET - CONFIG_BOOTLOADER_OFFSET_IN_FLASH,
* @param[out] desc Structure of info about bootloader.
* @return
* - ESP_OK Successful.
* - ESP_ERR_NOT_FOUND Description structure is not found in the bootloader image. Magic byte is incorrect.
* - ESP_ERR_INVALID_ARG Arguments are NULL.
* - ESP_ERR_INVALID_SIZE Read would go out of bounds of the partition.
* - or one of error codes from lower-level flash driver.
*/
esp_err_t esp_ota_get_bootloader_description(const esp_partition_t *bootloader_partition, esp_bootloader_desc_t *desc);
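A minimal usage sketch (illustrative, not part of this diff); passing NULL selects the bootloader at the default offset described above. The version and idf_ver fields are assumptions taken from esp_bootloader_desc_t in esp_bootloader_desc.h.

    #include "esp_log.h"
    #include "esp_ota_ops.h"

    static const char *TAG = "bl_info";

    static void print_bootloader_description(void)
    {
        esp_bootloader_desc_t desc;
        esp_err_t err = esp_ota_get_bootloader_description(NULL, &desc);
        if (err == ESP_OK) {
            // Field names assumed per esp_bootloader_desc.h
            ESP_LOGI(TAG, "Bootloader version: %u, built with IDF %s", (unsigned)desc.version, desc.idf_ver);
        } else {
            ESP_LOGW(TAG, "No bootloader description found (0x%x)", err);
        }
    }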

/**
* @brief Returns number of ota partitions provided in partition table.
*
@@ -350,10 +332,10 @@ typedef enum {
} esp_ota_secure_boot_public_key_index_t;

/**
* @brief Revokes the signature digest denoted by the given index. This should be called in the application only after the rollback logic otherwise the device may end up in unrecoverable state.
* @brief Revokes the old signature digest. To be called in the application after the rollback logic.
*
* Relevant for Secure boot v2 on ESP32-S2, ESP32-S3, ESP32-C3, ESP32-C6, ESP32-H2 where up to 3 key digests can be stored (Key \#N-1, Key \#N, Key \#N+1).
* When a key used to sign an app is invalidated, an OTA update is to be sent with an app signed with at least one of the other two keys which has not been revoked already.
* Relevant for Secure boot v2 on ESP32-S2, ESP32-S3, ESP32-C3, ESP32-C6, ESP32-H2 where upto 3 key digests can be stored (Key \#N-1, Key \#N, Key \#N+1).
* When key \#N-1 used to sign an app is invalidated, an OTA update is to be sent with an app signed with key \#N-1 & Key \#N.
* After successfully booting the OTA app should call this function to revoke Key \#N-1.
*
* @param index - The index of the signature block to be revoked
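An illustrative rotation sketch (not part of this diff), assuming app rollback (CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE) is enabled, the retiring digest sits in eFuse key block 0, and the updated app is already signed with a remaining key; per the note above, the rollback decision is settled before revoking:

    #include "esp_log.h"
    #include "esp_ota_ops.h"

    static const char *TAG = "key_rotation";

    static void revoke_old_signing_key(void)
    {
        // Mark the freshly updated app as valid first; revoking before the rollback
        // decision could otherwise leave the device without a bootable, verifiable image.
        if (esp_ota_mark_app_valid_cancel_rollback() == ESP_OK) {
            esp_err_t err = esp_ota_revoke_secure_boot_public_key(SECURE_BOOT_PUBLIC_KEY_INDEX_0);
            if (err != ESP_OK) {
                ESP_LOGE(TAG, "Revoking the old key digest failed (0x%x)", err);
            }
        }
    }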

@@ -1,7 +0,0 @@
# Documentation: .gitlab/ci/README.md#manifest-file-to-control-the-buildtest-apps

components/app_update/test_apps:
  disable:
    - if: IDF_TARGET in ["esp32c6", "esp32h2", "esp32p4"]
      temporary: true
      reason: target esp32c6, esp32h2, esp32p4 is not supported yet # TODO: IDF-8068
@@ -1,2 +1,3 @@
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-S2 | ESP32-S3 |
| ----------------- | ----- | -------- | -------- | -------- | -------- |

@@ -1,5 +1,5 @@
/*
* SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
* SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
@@ -185,8 +185,8 @@ static void erase_ota_data(void)
static void reboot_as_deep_sleep(void)
{
    ESP_LOGI(TAG, "reboot as deep sleep");
    esp_deep_sleep(20000);
    TEST_FAIL_MESSAGE("Should never be reachable except when sleep is rejected, abort");
    esp_sleep_enable_timer_wakeup(2000);
    esp_deep_sleep_start();
}

/* @brief Copies the current app to the next partition (OTA0-15); after that the ESP reboots and runs this (the next) OTAx.

@@ -1,9 +1,3 @@
idf_build_get_property(target IDF_TARGET)

if(${target} STREQUAL "linux")
    return() # This component is not supported by the POSIX/Linux simulator
endif()

idf_component_register(PRIV_REQUIRES partition_table esptool_py)

# Do not generate flash file when building bootloader or is in early expansion of the build
@@ -13,9 +7,8 @@ endif()

add_dependencies(bootloader partition_table_bin)

# When secure boot is enabled and CONFIG_SECURE_BOOT_FLASH_BOOTLOADER_DEFAULT is not enabled
# do not flash the bootloader along with the other artifacts using the command `idf.py flash`
if(NOT CONFIG_SECURE_BOOT OR CONFIG_SECURE_BOOT_FLASH_BOOTLOADER_DEFAULT)
# When secure boot is enabled, do not flash bootloader along with invocation of `idf.py flash`
if(NOT CONFIG_SECURE_BOOT)
    set(flash_bootloader FLASH_IN_PROJECT)
endif()

@@ -25,7 +18,7 @@ esptool_py_flash_target_image(bootloader-flash bootloader
    "${BOOTLOADER_BUILD_DIR}/bootloader.bin")

# Also attach an image to the project flash target
if(NOT CONFIG_SECURE_BOOT OR CONFIG_SECURE_BOOT_FLASH_BOOTLOADER_DEFAULT)
if(NOT CONFIG_SECURE_BOOT)
    esptool_py_flash_target_image(flash bootloader
        ${CONFIG_BOOTLOADER_OFFSET_IN_FLASH}
        "${BOOTLOADER_BUILD_DIR}/bootloader.bin")

@@ -1,12 +1,8 @@
menu "Bootloader config"

orsource "../esp_bootloader_format/Kconfig.bootloader"

config BOOTLOADER_OFFSET_IN_FLASH
hex
default 0x1000 if IDF_TARGET_ESP32 || IDF_TARGET_ESP32S2
# the first 2 sectors are reserved for the key manager with AES-XTS (flash encryption) purpose
default 0x2000 if IDF_TARGET_ESP32P4
default 0x0
help
Offset address that 2nd bootloader will be flashed to.
@@ -66,83 +62,34 @@ menu "Bootloader config"
default 4 if BOOTLOADER_LOG_LEVEL_DEBUG
default 5 if BOOTLOADER_LOG_LEVEL_VERBOSE

menu "Serial Flash Configurations"
config BOOTLOADER_SPI_CUSTOM_WP_PIN
bool "Use custom SPI Flash WP Pin when flash pins set in eFuse (read help)"
depends on IDF_TARGET_ESP32 && (ESPTOOLPY_FLASHMODE_QIO || ESPTOOLPY_FLASHMODE_QOUT)
default y if BOOTLOADER_SPI_WP_PIN != 7 # backwards compatibility, can remove in IDF 5
default n
help
This setting is only used if the SPI flash pins have been overridden by setting the eFuses
SPI_PAD_CONFIG_xxx, and the SPI flash mode is QIO or QOUT.
config BOOTLOADER_SPI_CUSTOM_WP_PIN
bool "Use custom SPI Flash WP Pin when flash pins set in eFuse (read help)"
depends on IDF_TARGET_ESP32 && (ESPTOOLPY_FLASHMODE_QIO || ESPTOOLPY_FLASHMODE_QOUT)
default y if BOOTLOADER_SPI_WP_PIN != 7 # backwards compatibility, can remove in IDF 5
default n
help
This setting is only used if the SPI flash pins have been overridden by setting the eFuses
SPI_PAD_CONFIG_xxx, and the SPI flash mode is QIO or QOUT.

When this is the case, the eFuse config only defines 3 of the 4 Quad I/O data pins. The WP pin (aka
ESP32 pin "SD_DATA_3" or SPI flash pin "IO2") is not specified in eFuse. The same pin is also used
for external SPIRAM if it is enabled.
When this is the case, the eFuse config only defines 3 of the 4 Quad I/O data pins. The WP pin (aka
ESP32 pin "SD_DATA_3" or SPI flash pin "IO2") is not specified in eFuse. The same pin is also used
for external SPIRAM if it is enabled.

If this config item is set to N (default), the correct WP pin will be automatically used for any
Espressif chip or module with integrated flash. If a custom setting is needed, set this config item to
Y and specify the GPIO number connected to the WP.
If this config item is set to N (default), the correct WP pin will be automatically used for any
Espressif chip or module with integrated flash. If a custom setting is needed, set this config item to
Y and specify the GPIO number connected to the WP.

config BOOTLOADER_SPI_WP_PIN
int "Custom SPI Flash WP Pin"
range 0 33
default 7
depends on IDF_TARGET_ESP32 && (ESPTOOLPY_FLASHMODE_QIO || ESPTOOLPY_FLASHMODE_QOUT)
#depends on BOOTLOADER_SPI_CUSTOM_WP_PIN # backwards compatibility, can uncomment in IDF 5
help
The option "Use custom SPI Flash WP Pin" must be set or this value is ignored
config BOOTLOADER_SPI_WP_PIN
int "Custom SPI Flash WP Pin"
range 0 33
default 7
depends on IDF_TARGET_ESP32 && (ESPTOOLPY_FLASHMODE_QIO || ESPTOOLPY_FLASHMODE_QOUT)
#depends on BOOTLOADER_SPI_CUSTOM_WP_PIN # backwards compatibility, can uncomment in IDF 5
help
The option "Use custom SPI Flash WP Pin" must be set or this value is ignored

If burning a customized set of SPI flash pins in eFuse and using QIO or QOUT mode for flash, set this
value to the GPIO number of the SPI flash WP pin.

config BOOTLOADER_FLASH_DC_AWARE
bool "Allow app adjust Dummy Cycle bits in SPI Flash for higher frequency (READ HELP FIRST)"
help
This will force 2nd bootloader to be loaded by DOUT mode, and will restore Dummy Cycle setting by
resetting the Flash

config BOOTLOADER_FLASH_XMC_SUPPORT
bool "Enable the support for flash chips of XMC (READ DOCS FIRST)"
default y
depends on !IDF_ENV_BRINGUP
help
Perform the startup flow recommended by XMC. Please consult XMC for the details of this flow.
XMC chips will be forbidden to be used, when this option is disabled.

DON'T DISABLE THIS UNLESS YOU KNOW WHAT YOU ARE DOING.

comment "Features below require specific hardware (READ DOCS FIRST!)"

config BOOTLOADER_FLASH_32BIT_ADDR
bool
default y if ESPTOOLPY_FLASHSIZE_32MB || ESPTOOLPY_FLASHSIZE_64MB || ESPTOOLPY_FLASHSIZE_128MB
default n
help
This is a helper config for 32bits address flash. Invisible for users.

config BOOTLOADER_FLASH_NEEDS_32BIT_FEAT
bool
default y if BOOTLOADER_FLASH_32BIT_ADDR && !ESPTOOLPY_OCT_FLASH
help
This is a helper config for 32bits address flash. Invisible for users.

config BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
bool "Enable cache access to 32-bit-address (over 16MB) range of SPI Flash (READ DOCS FIRST)"
depends on BOOTLOADER_FLASH_NEEDS_32BIT_FEAT && IDF_TARGET_ESP32S3 && IDF_EXPERIMENTAL_FEATURES
default n
help
Enabling this option allows the CPU to access 32-bit-address flash beyond 16M range.
1. This option is only valid for 4-line (quad) flash. Octal flash doesn't need this.
2. This option is experimental, which means it may not work stably on all flash chips; for more
information, please contact Espressif Business support.

config BOOTLOADER_CACHE_32BIT_ADDR_OCTAL_FLASH
bool
default y if ESPTOOLPY_OCT_FLASH && BOOTLOADER_FLASH_32BIT_ADDR
default n

endmenu
If burning a customized set of SPI flash pins in eFuse and using QIO or QOUT mode for flash, set this
value to the GPIO number of the SPI flash WP pin.

choice BOOTLOADER_VDDSDIO_BOOST
bool "VDDSDIO LDO voltage"
@@ -477,6 +424,15 @@ menu "Bootloader config"
- "Reserve RTC FAST memory for custom purposes"
- "GPIO triggers factory reset"

config BOOTLOADER_FLASH_XMC_SUPPORT
bool "Enable the support for flash chips of XMC (READ HELP FIRST)"
default y
help
Perform the startup flow recommended by XMC. Please consult XMC for the details of this flow.
XMC chips will be forbidden to be used, when this option is disabled.

DON'T DISABLE THIS UNLESS YOU KNOW WHAT YOU ARE DOING.

endmenu # Bootloader


@@ -748,12 +704,14 @@ menu "Security features"

config SECURE_BOOT_VERIFICATION_KEY
string "Secure boot public signature verification key"
depends on SECURE_SIGNED_APPS && SECURE_SIGNED_APPS_ECDSA_SCHEME && !SECURE_BOOT_BUILD_SIGNED_BINARIES
depends on SECURE_SIGNED_APPS && !SECURE_BOOT_BUILD_SIGNED_BINARIES && !SECURE_SIGNED_APPS_RSA_SCHEME
default "signature_verification_key.bin"
help
Path to a public key file used to verify signed images.
Secure Boot V1: This ECDSA public key is compiled into the bootloader and/or
app, to verify app images.
Secure Boot V2: This RSA public key is compiled into the signature block at
the end of the bootloader/app.

Key file is in raw binary format, and can be extracted from a
PEM formatted private key using the espsecure.py
@@ -777,23 +735,6 @@ menu "Security features"
This can lead to permanent bricking of the device, in case all keys are revoked
because of signature verification failure.

config SECURE_BOOT_FLASH_BOOTLOADER_DEFAULT
bool "Flash bootloader along with other artifacts when using the default flash command"
depends on SECURE_BOOT_V2_ENABLED && SECURE_BOOT_BUILD_SIGNED_BINARIES
default N
help
When Secure Boot V2 is enabled, by default the bootloader is not flashed along with other artifacts
like the application and the partition table images, i.e. the bootloader has to be separately flashed
using the command `idf.py bootloader flash`, whereas the application and partition table can be flashed
using the command `idf.py flash` itself.
Enabling this option allows flashing the bootloader along with the other artifacts
by invocation of the command `idf.py flash`.

If this option is enabled, make sure that even the bootloader is signed using the correct secure boot key,
otherwise the bootloader signature verification would fail, as the hash of the public key which is present in
the bootloader signature would not match the digest stored in the eFuses
and thus the device will not be able to boot up.

choice SECURE_BOOTLOADER_KEY_ENCODING
bool "Hardware Key Encoding"
depends on SECURE_BOOTLOADER_REFLASHABLE
@@ -843,11 +784,11 @@ menu "Security features"
before enabling.

choice SECURE_FLASH_ENCRYPTION_KEYSIZE
bool "Size of generated XTS-AES key"
bool "Size of generated AES-XTS key"
default SECURE_FLASH_ENCRYPTION_AES128
depends on SOC_FLASH_ENCRYPTION_XTS_AES_OPTIONS && SECURE_FLASH_ENC_ENABLED
help
Size of generated XTS-AES key.
Size of generated AES-XTS key.

- AES-128 uses a 256-bit key (32 bytes) derived from 128 bits (16 bytes) burned in half Efuse key block.
Internally, it calculates SHA256(128 bits)
@@ -905,9 +846,7 @@ menu "Security features"
default y if (SOC_EFUSE_DIS_ICACHE || IDF_TARGET_ESP32) && SECURE_FLASH_ENC_ENABLED

menu "Potentially insecure options"
visible if (SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT || \
SECURE_BOOT_INSECURE || \
SECURE_SIGNED_ON_UPDATE_NO_SECURE_BOOT) # NOERROR
visible if SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT || SECURE_BOOT_INSECURE || SECURE_SIGNED_ON_UPDATE_NO_SECURE_BOOT # NOERROR

# NOTE: Options in this menu NEED to have SECURE_BOOT_INSECURE
# and/or SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT in "depends on", as the menu
@@ -1011,8 +950,7 @@ menu "Security features"

config SECURE_FLASH_UART_BOOTLOADER_ALLOW_CACHE
bool "Leave UART bootloader flash cache enabled"
depends on SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT && \
(IDF_TARGET_ESP32 || SOC_EFUSE_DIS_DOWNLOAD_ICACHE || SOC_EFUSE_DIS_DOWNLOAD_DCACHE) # NOERROR
depends on SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT && (IDF_TARGET_ESP32 || SOC_EFUSE_DIS_DOWNLOAD_ICACHE || SOC_EFUSE_DIS_DOWNLOAD_DCACHE) # NOERROR
default N
select SECURE_FLASH_SKIP_WRITE_PROTECTION_CACHE if SECURE_FLASH_HAS_WRITE_PROTECTION_CACHE
help

@@ -124,20 +124,13 @@ idf_build_get_property(sdkconfig SDKCONFIG)
idf_build_get_property(python PYTHON)
idf_build_get_property(extra_cmake_args EXTRA_CMAKE_ARGS)

# We cannot pass lists as a parameter to the external project without modifying the ';' separator
string(REPLACE ";" "|" BOOTLOADER_IGNORE_EXTRA_COMPONENT "${BOOTLOADER_IGNORE_EXTRA_COMPONENT}")

externalproject_add(bootloader
    SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/subproject"
    BINARY_DIR "${BOOTLOADER_BUILD_DIR}"
    # Modifying the list separator for the arguments so that we won't need to manually
    # replace the new separator by the default ';' in the subproject
    LIST_SEPARATOR |
    CMAKE_ARGS -DSDKCONFIG=${sdkconfig} -DIDF_PATH=${idf_path} -DIDF_TARGET=${idf_target}
    -DPYTHON_DEPS_CHECKED=1 -DPYTHON=${python}
    -DEXTRA_COMPONENT_DIRS=${CMAKE_CURRENT_LIST_DIR}
    -DPROJECT_SOURCE_DIR=${PROJECT_SOURCE_DIR}
    -DIGNORE_EXTRA_COMPONENT=${BOOTLOADER_IGNORE_EXTRA_COMPONENT}
    ${sign_key_arg} ${ver_key_arg}
    ${extra_cmake_args}
    INSTALL_COMMAND ""

@@ -23,7 +23,3 @@ CONFIG_FLASH_ENCRYPTION_UART_BOOTLOADER_ALLOW_CACHE CONFIG_SECURE_FLASH_

# Secure Boot Scheme
CONFIG_SECURE_BOOT_ENABLED CONFIG_SECURE_BOOT_V1_ENABLED

CONFIG_SPI_FLASH_32BIT_ADDR_ENABLE CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
CONFIG_SPI_FLASH_QUAD_32BIT_ADDR_ENABLE CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_QUAD_FLASH
CONFIG_SPI_FLASH_OCTAL_32BIT_ADDR_ENABLE CONFIG_BOOTLOADER_CACHE_32BIT_ADDR_OCTAL_FLASH

@@ -41,18 +41,11 @@ if(EXISTS ${PROJECT_EXTRA_COMPONENTS})
list(APPEND EXTRA_COMPONENT_DIRS "${PROJECT_EXTRA_COMPONENTS}")
endif()

if(IGNORE_EXTRA_COMPONENT)
# Prefix all entries of the list with ${PROJECT_EXTRA_COMPONENTS} absolute path
list(TRANSFORM IGNORE_EXTRA_COMPONENT
PREPEND "${PROJECT_EXTRA_COMPONENTS}/"
OUTPUT_VARIABLE EXTRA_COMPONENT_EXCLUDE_DIRS)
endif()

# Consider each directory in the project's bootloader_components as a component to be compiled
# Consider each directory in project's bootloader_components as a component to be compiled
file(GLOB proj_components RELATIVE ${PROJECT_EXTRA_COMPONENTS} ${PROJECT_EXTRA_COMPONENTS}/*)
foreach(component ${proj_components})
# Only directories are considered components
if(IS_DIRECTORY "${PROJECT_EXTRA_COMPONENTS}/${component}" AND NOT ${component} IN_LIST IGNORE_EXTRA_COMPONENT)
# Only directories are considered as components
if(IS_DIRECTORY ${curdir}/${child})
list(APPEND COMPONENTS ${component})
endif()
endforeach()
@@ -60,7 +53,6 @@ endforeach()
set(BOOTLOADER_BUILD 1)
include("${IDF_PATH}/tools/cmake/project.cmake")
set(common_req log esp_rom esp_common esp_hw_support newlib)
idf_build_set_property(EXTRA_COMPONENT_EXCLUDE_DIRS "${EXTRA_COMPONENT_EXCLUDE_DIRS}")
idf_build_set_property(__COMPONENT_REQUIRES_COMMON "${common_req}")
idf_build_set_property(__OUTPUT_SDKCONFIG 0)
project(bootloader)
@@ -210,11 +202,7 @@ elseif(CONFIG_SECURE_BOOTLOADER_REFLASHABLE)
"* Not recommended to re-use the same secure boot keyfile on multiple production devices."
DEPENDS gen_secure_bootloader_key gen_bootloader_digest_bin
VERBATIM)
elseif(
CONFIG_SECURE_BOOT_V2_ENABLED AND
(CONFIG_SOC_EFUSE_SECURE_BOOT_KEY_DIGESTS GREATER 1) AND
NOT CONFIG_SECURE_BOOT_FLASH_BOOTLOADER_DEFAULT
)
elseif(CONFIG_SECURE_BOOT_V2_ENABLED AND (CONFIG_IDF_TARGET_ESP32S2 OR CONFIG_IDF_TARGET_ESP32C3))
add_custom_command(TARGET bootloader.elf POST_BUILD
COMMAND ${CMAKE_COMMAND} -E echo
"=============================================================================="
@@ -233,7 +221,7 @@ elseif(
"=============================================================================="
DEPENDS gen_signed_bootloader
VERBATIM)
elseif(CONFIG_SECURE_BOOT_V2_ENABLED AND NOT CONFIG_SECURE_BOOT_FLASH_BOOTLOADER_DEFAULT)
elseif(CONFIG_SECURE_BOOT_V2_ENABLED)
add_custom_command(TARGET bootloader.elf POST_BUILD
COMMAND ${CMAKE_COMMAND} -E echo
"=============================================================================="

@@ -42,9 +42,7 @@ SECTIONS
*(.stub .gnu.warning .gnu.linkonce.literal.* .gnu.linkonce.t.*.literal .gnu.linkonce.t.*)
*(.iram1 .iram1.*) /* catch stray IRAM_ATTR */
*liblog.a:(.literal .text .literal.* .text.*)
/* we use either libgcc or compiler-rt, so put similar entries for them here */
*libgcc.a:(.literal .text .literal.* .text.*)
*libclang_rt.builtins.a:(.literal .text .literal.* .text.*)
*libbootloader_support.a:bootloader_clock_loader.*(.literal .text .literal.* .text.*)
*libbootloader_support.a:bootloader_common_loader.*(.literal .text .literal.* .text.*)
*libbootloader_support.a:bootloader_flash.*(.literal .text .literal.* .text.*)
@@ -111,14 +109,9 @@ SECTIONS
_bss_end = ABSOLUTE(.);
} >dram_seg

.dram0.bootdesc : ALIGN(0x10)
.dram0.data :
{
_data_start = ABSOLUTE(.);
*(.data_bootloader_desc .data_bootloader_desc.*) /* Should be the first. Bootloader version info. DO NOT PUT ANYTHING BEFORE IT! */
} > dram_seg

.dram0.data : ALIGN(0x10)
{
*(.data)
*(.data.*)
*(.gnu.linkonce.d.*)

@@ -0,0 +1,9 @@
PROVIDE ( ets_update_cpu_frequency = 0x40008550 ); /* Updates g_ticks_per_us on the current CPU only; not on the other core */
PROVIDE ( MD5Final = 0x4005db1c );
PROVIDE ( MD5Init = 0x4005da7c );
PROVIDE ( MD5Update = 0x4005da9c );

/* bootloader will use following functions from xtensa hal library */
xthal_get_ccount = 0x4000c050;
xthal_get_ccompare = 0x4000c078;
xthal_set_ccompare = 0x4000c058;

Some files were not shown because too many files have changed in this diff.