Mirror of https://github.com/home-assistant/core.git (synced 2026-04-13 05:06:12 +02:00)

Compare commits: rc...add_device (378 commits)
@@ -1,18 +1,11 @@
---
name: github-pr-reviewer
description: Review a GitHub pull request and provide feedback comments. Use when the user says "review the current PR" or asks to review a specific PR.
description: Reviews GitHub pull requests and provides feedback comments.
disallowedTools: Write, Edit
---

# Review GitHub Pull Request

## Preparation:
- Check if the local commit matches the last one in the PR. If not, check out the PR locally using 'gh pr checkout'.
- CRITICAL: If 'gh pr checkout' fails for ANY reason, you MUST immediately STOP.
- Do NOT attempt any workarounds.
- Do NOT proceed with the review.
- ALERT about the failure and WAIT for instructions.
- This is a hard requirement - no exceptions.

## Follow these steps:
1. Use 'gh pr view' to get the PR details and description.
2. Use 'gh pr diff' to see all the changes in the PR.

.claude/agents/raise-pull-request.md (new file, 229 lines)
@@ -0,0 +1,229 @@
---
name: raise-pull-request
description: |
  Use this agent when creating a pull request for the Home Assistant core repository after completing implementation work. This agent automates the PR creation process including running tests, formatting checks, and proper checkbox handling.
model: inherit
color: green
tools: Read, Bash, Grep, Glob
---

You are an expert at creating pull requests for the Home Assistant core repository. You will automate the PR creation process with proper verification, formatting, testing, and checkbox handling.

**Execute each step in order. Do not skip steps.**

## Step 1: Gather Information

Run these commands to analyze the changes:

```bash
# Get current branch and remote
git branch --show-current
git remote -v | grep push

# Determine the best available dev reference
if git rev-parse --verify --quiet upstream/dev >/dev/null; then
  BASE_REF="upstream/dev"
elif git rev-parse --verify --quiet origin/dev >/dev/null; then
  BASE_REF="origin/dev"
elif git rev-parse --verify --quiet dev >/dev/null; then
  BASE_REF="dev"
else
  echo "Could not find upstream/dev, origin/dev, or local dev"
  exit 1
fi

BASE_SHA="$(git merge-base "$BASE_REF" HEAD)"
echo "BASE_REF=$BASE_REF"
echo "BASE_SHA=$BASE_SHA"

# Get commit info for this branch vs dev
git log "${BASE_SHA}..HEAD" --oneline

# Check what files changed
git diff "${BASE_SHA}..HEAD" --name-only

# Check if test files were added/modified
git diff "${BASE_SHA}..HEAD" --name-only | grep -E "^tests/.*\.py$" || echo "NO_TESTS_CHANGED"

# Check if manifest.json changed
git diff "${BASE_SHA}..HEAD" --name-only | grep "manifest.json" || echo "NO_MANIFEST_CHANGED"
```

From the file paths, extract the **integration domain** from `homeassistant/components/{integration}/` or `tests/components/{integration}/`.

**Track results:**
- `BASE_REF`: the dev reference used for comparison
- `BASE_SHA`: the merge-base commit used for diff-based checks
- `TESTS_CHANGED`: true if test files were added or modified
- `MANIFEST_CHANGED`: true if manifest.json was modified

**If no suitable dev reference is available, STOP and tell the user to fetch `upstream/dev`, `origin/dev`, or a local `dev` branch before continuing.**

## Step 2: Run Code Quality Checks

Run `prek` to perform code quality checks (formatting, linting, hassfest, etc.) on the files changed since `BASE_SHA`:

```bash
prek run --from-ref "$BASE_SHA" --to-ref HEAD
```

**Track results:**
- `PREK_PASSED`: true if `prek run` exits with code 0

**If `prek` fails or is not available, STOP and report the failure to the user. Do not proceed with PR creation. If the failure appears to be an environment setup issue (e.g., missing tools, command not found, venv not activated), also point the user to https://developers.home-assistant.io/docs/development_environment.**

## Step 3: Stage Any Changes from Checks

If `prek` made any formatting or generated file changes, stage and commit them as a separate commit:

```bash
git status --porcelain
# If changes exist:
git add -A
git commit -m "Apply prek formatting and generated file updates"
```

## Step 4: Run Tests

Run pytest for the specific integration:

```bash
pytest tests/components/{integration} \
  --timeout=60 \
  --durations-min=1 \
  --durations=0 \
  -q
```

**Track results:**
- `TESTS_PASSED`: true if pytest exits with code 0

**If tests fail, STOP and report the failures to the user. Do not proceed with PR creation.**

## Step 5: Identify PR Metadata

Write a release-note-style PR title summarizing the change. The title becomes the release notes entry, so it should be a short, self-contained description of what changed, written in the imperative mood.

**PR Title Examples by Type:**
| Type | Example titles |
|------|----------------|
| Bugfix | `Fix Hikvision NVR binary sensors not being detected` |
| | `Fix JSON serialization of time objects in anthropic tool results` |
| | `Fix config flow bug in Tesla Fleet` |
| Dependency | `Bump eheimdigital to 1.5.0` |
| | `Bump python-otbr-api to 2.7.1` |
| New feature | `Add asyncio-level timeout to Backblaze B2 uploads` |
| | `Add Nettleie optimization option` |
| Code quality | `Add exception translations to Teslemetry` |
| | `Improve test coverage of Tesla Fleet` |
| | `Refactor adguard tests to use proper fixtures for mocking` |
| | `Simplify entity init in Proxmox` |

## Step 6: Verify Development Checklist

Check each item from the [development checklist](https://developers.home-assistant.io/docs/development_checklist/):

| Item | How to verify |
|------|---------------|
| External libraries on PyPI | Check manifest.json requirements - all should be PyPI packages |
| Dependencies in requirements_all.txt | Only if dependency declarations changed (the `requirements` field in `manifest.json` or `requirements_all.txt`), run `python -m script.gen_requirements_all` |
| Codeowners updated | If this is a new integration, ensure its `manifest.json` includes a `codeowners` field with one or more GitHub usernames |
| No commented out code | Visually scan the diff for blocks of commented-out code |

**Track results:**
- `NO_COMMENTED_CODE`: true if no blocks of commented-out code found in the diff
- `DEPENDENCIES_CHANGED`: true if the diff changes the `requirements` field in `manifest.json` or changes `requirements_all.txt`
- `REQUIREMENTS_UPDATED`: true if `DEPENDENCIES_CHANGED` is true and requirements_all.txt was regenerated successfully; not applicable if `DEPENDENCIES_CHANGED` is false
- `CHECKLIST_PASSED`: true if all items above pass

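As an illustrative sketch only (the exact commands below are an assumption, not part of the checklist itself, and the grep pattern is just a heuristic), the two mechanical checks from the table could be scripted like this:

```bash
# Regenerate requirements_all.txt only when dependency declarations changed
if [ "$DEPENDENCIES_CHANGED" = "true" ]; then
  python -m script.gen_requirements_all
fi

# Rough heuristic for commented-out code added in the diff; a visual scan is still required
git diff "${BASE_SHA}..HEAD" -- '*.py' \
  | grep -E '^\+[[:space:]]*#[[:space:]]*(def |class |return |await |self\.)' \
  || echo "No obviously commented-out code found"
```
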
## Step 7: Determine Type of Change

Select exactly ONE based on the changes. Mark the selected type with `[x]` and all others with `[ ]` (space):

| Type | Condition |
|------|-----------|
| Dependency upgrade | Only manifest.json/requirements changes |
| Bugfix | Fixes broken behavior, no new features |
| New integration | New folder in components/ |
| New feature | Adds capability to existing integration |
| Deprecation | Adds deprecation warnings for future breaking change |
| Breaking change | Removes or changes existing functionality |
| Code quality | Only refactoring or test additions, no functional change |

**Track results:**
- `CHANGE_TYPE`: the selected type (e.g., "Bugfix", "New feature", "Code quality", etc.)

**Important:** All seven type options must remain in the PR body. Only the selected type gets `[x]`, all others get `[ ]`.

## Step 8: Determine Checkbox States

Based on the verification steps above, determine checkbox states:

| Checkbox | Condition to tick |
|----------|-------------------|
| The code change is tested and works locally | Leave unchecked for the contributor to verify manually (this refers to manual testing, not unit tests) |
| Local tests pass | Tick only if `TESTS_PASSED` is true |
| I understand the code I am submitting and can explain how it works | Leave unchecked for the contributor to review and set manually |
| There is no commented out code | Tick only if `NO_COMMENTED_CODE` is true |
| Development checklist | Tick only if `CHECKLIST_PASSED` is true |
| Perfect PR recommendations | Tick only if the PR affects a single integration or closely related modules, represents one primary type of change, and has a clear, self-contained scope |
| Formatted using Ruff | Tick only if `PREK_PASSED` is true |
| Tests have been added | Tick only if `TESTS_CHANGED` is true AND the changes exercise new or changed functionality (not only cosmetic test changes) |
| Documentation added/updated | Tick if documentation PR created (or not applicable) |
| Manifest file fields filled out | Tick if `PREK_PASSED` is true (or not applicable) |
| Dependencies in requirements_all.txt | Tick only if `DEPENDENCIES_CHANGED` is false, or if `DEPENDENCIES_CHANGED` is true and `REQUIREMENTS_UPDATED` is true |
| Dependency changelog linked | Tick if dependency changelog linked in PR description (or not applicable) |
| Any generated code has been carefully reviewed | Leave unchecked for the contributor to review and set manually |

## Step 9: Breaking Change Section

**If `CHANGE_TYPE` is NOT "Breaking change" or "Deprecation": REMOVE the entire "## Breaking change" section from the PR body (including the heading).**

If `CHANGE_TYPE` IS "Breaking change" or "Deprecation", keep the `## Breaking change` section and describe:
- What breaks
- How users can fix it
- Why it was necessary

## Step 10: Push Branch and Create PR

```bash
# Get branch name and GitHub username
BRANCH=$(git branch --show-current)
PUSH_REMOTE=$(git config "branch.$BRANCH.remote" 2>/dev/null || git remote | head -1)
GITHUB_USER=$(gh api user --jq .login 2>/dev/null || git remote get-url "$PUSH_REMOTE" | sed -E 's#.*[:/]([^/]+)/([^/]+)(\.git)?$#\1#')

# Create PR (gh pr create pushes the branch automatically)
gh pr create --repo home-assistant/core --base dev \
  --head "$GITHUB_USER:$BRANCH" \
  --draft \
  --title "TITLE_HERE" \
  --body "$(cat <<'EOF'
BODY_HERE
EOF
)"
```

### PR Body Template

Read the PR template from `.github/PULL_REQUEST_TEMPLATE.md` and use it as the basis for the PR body. **Do not hardcode the template — always read it from the file to stay in sync with upstream changes.**

Use any HTML comments (`<!-- ... -->`) in the template as guidance to understand what to fill in. For the final PR body sent to GitHub, keep the template text intact — do not delete any text from the template unless it explicitly instructs removal (e.g., the breaking change section when not applicable). Then fill in the sections:

1. **Breaking change section**: If the type is NOT "Breaking change" or "Deprecation", remove the entire `## Breaking change` section (heading and body). Otherwise, describe what breaks, how users can fix it, and why.
2. **Proposed change section**: Fill in a description of the change extracted from commit messages.
3. **Type of change**: Check exactly ONE checkbox matching the determined type from Step 7. Leave all others unchecked.
4. **Additional information**: Fill in any related issue numbers if known.
5. **Checklist**: Check boxes based on the conditions in Step 8. Leave manual-verification boxes unchecked for the contributor.

**Important:** Preserve all template structure, options, and link references exactly as they appear in the file — only modify checkbox states and fill in content sections.

## Step 11: Report Result

Provide the user with:
1. **PR URL** - The created pull request link
2. **Verification Summary** - Which checks passed/failed
3. **Unchecked Items** - List any checkboxes left unchecked and why
4. **User Action Required** - Remind user to:
   - Review and set manual-verification checkboxes ("I understand the code..." and "Any generated code...") as applicable
   - Consider reviewing two other open PRs
   - Add any related issue numbers if applicable

@@ -3,54 +3,27 @@ name: Home Assistant Integration knowledge
description: Everything you need to know to build, test and review Home Assistant Integrations. If you're looking at an integration, you must use this as your primary reference.
---

### File Locations
## File Locations
- **Integration code**: `./homeassistant/components/<integration_domain>/`
- **Integration tests**: `./tests/components/<integration_domain>/`

## Integration Templates
## General guidelines

### Standard Integration Structure
```
homeassistant/components/my_integration/
├── __init__.py          # Entry point with async_setup_entry
├── manifest.json        # Integration metadata and dependencies
├── const.py             # Domain and constants
├── config_flow.py       # UI configuration flow
├── coordinator.py       # Data update coordinator (if needed)
├── entity.py            # Base entity class (if shared patterns)
├── sensor.py            # Sensor platform
├── strings.json         # User-facing text and translations
├── services.yaml        # Service definitions (if applicable)
└── quality_scale.yaml   # Quality scale rule status
```
- When looking for examples, prefer integrations with the platinum or gold quality scale level first.
- Polling intervals are NOT user-configurable. Never add scan_interval, update_interval, or polling frequency options to config flows or config entries.
- Do NOT allow users to set config entry names in config flows. Names are automatically generated or can be customized later in UI. Exception: helper integrations may allow custom names.

An integration can have platforms as needed (e.g., `sensor.py`, `switch.py`, etc.). The following platforms have extra guidelines:
The following platforms have extra guidelines:
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection
- **Repairs**: [`platform-repairs.md`](platform-repairs.md) for user-actionable repair issues

### Minimal Integration Checklist
- [ ] `manifest.json` with required fields (domain, name, codeowners, etc.)
- [ ] `__init__.py` with `async_setup_entry` and `async_unload_entry`
- [ ] `config_flow.py` with UI configuration support
- [ ] `const.py` with `DOMAIN` constant
- [ ] `strings.json` with at least config flow text
- [ ] Platform files (`sensor.py`, etc.) as needed
- [ ] `quality_scale.yaml` with rule status tracking

## Integration Quality Scale

Home Assistant uses an Integration Quality Scale to ensure code quality and consistency. The quality level determines which rules apply:
- When validating the quality scale rules, check them at https://developers.home-assistant.io/docs/core/integration-quality-scale/rules
- When implementing or reviewing an integration, always consider the quality scale rules, since they promote best practices.

### Quality Scale Levels
- **Bronze**: Basic requirements (ALL Bronze rules are mandatory)
- **Silver**: Enhanced functionality
- **Gold**: Advanced features
- **Platinum**: Highest quality standards

### Quality Scale Progression
- **Bronze → Silver**: Add entity unavailability, parallel updates, auth flows
- **Silver → Gold**: Add device management, diagnostics, translations
- **Gold → Platinum**: Add strict typing, async dependencies, websession injection
Template scale file: `./script/scaffold/templates/integration/integration/quality_scale.yaml`

### How Rules Apply
1. **Check `manifest.json`**: Look for `"quality_scale"` key to determine integration level
@@ -61,726 +34,7 @@ Home Assistant uses an Integration Quality Scale to ensure code quality and cons
- `exempt`: Rule doesn't apply (with reason in comment)
- `todo`: Rule needs implementation

### Example `quality_scale.yaml` Structure
```yaml
rules:
  # Bronze (mandatory)
  config-flow: done
  entity-unique-id: done
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.

  # Silver (if targeting Silver+)
  entity-unavailable: done
  parallel-updates: done

  # Gold (if targeting Gold+)
  devices: done
  diagnostics: done

  # Platinum (if targeting Platinum)
  strict-typing: done
```

**When Reviewing/Creating Code**: Always check the integration's quality scale level and exemption status before applying rules.

## Code Organization

### Core Locations
- Shared constants: `homeassistant/const.py` (use these instead of hardcoding)
- Integration structure:
  - `homeassistant/components/{domain}/const.py` - Constants
  - `homeassistant/components/{domain}/models.py` - Data models
  - `homeassistant/components/{domain}/coordinator.py` - Update coordinator
  - `homeassistant/components/{domain}/config_flow.py` - Configuration flow
  - `homeassistant/components/{domain}/{platform}.py` - Platform implementations

### Common Modules
- **coordinator.py**: Centralize data fetching logic
  ```python
  class MyCoordinator(DataUpdateCoordinator[MyData]):
      def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
          super().__init__(
              hass,
              logger=LOGGER,
              name=DOMAIN,
              update_interval=timedelta(minutes=1),
              config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
          )
  ```
- **entity.py**: Base entity definitions to reduce duplication
  ```python
  class MyEntity(CoordinatorEntity[MyCoordinator]):
      _attr_has_entity_name = True
  ```

### Runtime Data Storage
- **Use ConfigEntry.runtime_data**: Store non-persistent runtime data
  ```python
  type MyIntegrationConfigEntry = ConfigEntry[MyClient]

  async def async_setup_entry(hass: HomeAssistant, entry: MyIntegrationConfigEntry) -> bool:
      client = MyClient(entry.data[CONF_HOST])
      entry.runtime_data = client
  ```

### Manifest Requirements
- **Required Fields**: `domain`, `name`, `codeowners`, `integration_type`, `documentation`, `requirements`
- **Integration Types**: `device`, `hub`, `service`, `system`, `helper`
- **IoT Class**: Always specify connectivity method (e.g., `cloud_polling`, `local_polling`, `local_push`)
- **Discovery Methods**: Add when applicable: `zeroconf`, `dhcp`, `bluetooth`, `ssdp`, `usb`
- **Dependencies**: Include platform dependencies (e.g., `application_credentials`, `bluetooth_adapters`)

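A minimal `manifest.json` sketch showing these fields together (the domain, library, and owner values below are placeholders, not taken from a real integration):

```json
{
  "domain": "my_integration",
  "name": "My Integration",
  "codeowners": ["@me"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/my_integration",
  "integration_type": "hub",
  "iot_class": "local_polling",
  "requirements": ["my-device-lib==1.2.3"],
  "zeroconf": ["_mydevice._tcp.local."]
}
```
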
### Config Flow Patterns
- **Version Control**: Always set `VERSION = 1` and `MINOR_VERSION = 1`
- **Unique ID Management**:
  ```python
  await self.async_set_unique_id(device_unique_id)
  self._abort_if_unique_id_configured()
  ```
- **Error Handling**: Define errors in `strings.json` under `config.error`
- **Step Methods**: Use standard naming (`async_step_user`, `async_step_discovery`, etc.)

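Putting these patterns together, a minimal user step might look like the sketch below (the client class, exception, and schema are placeholders, and imports are omitted as in the other snippets here):

```python
class MyIntegrationConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for My Integration."""

    VERSION = 1
    MINOR_VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial user step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            client = MyClient(user_input[CONF_HOST])
            try:
                device = await client.get_device_info()
            except MyException:
                errors["base"] = "cannot_connect"
            else:
                await self.async_set_unique_id(device.serial)
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title=device.name, data=user_input)

        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
            errors=errors,
        )
```
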
### Integration Ownership
- **manifest.json**: Add GitHub usernames to `codeowners`:
  ```json
  {
    "domain": "my_integration",
    "name": "My Integration",
    "codeowners": ["@me"]
  }
  ```

### Async Dependencies (Platinum)
- **Requirement**: All dependencies must use asyncio
- Ensures efficient task handling without thread context switching

### WebSession Injection (Platinum)
- **Pass WebSession**: Support passing web sessions to dependencies
  ```python
  async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
      """Set up integration from config entry."""
      client = MyClient(entry.data[CONF_HOST], async_get_clientsession(hass))
  ```
- For cookies: Use `async_create_clientsession` (aiohttp) or `create_async_httpx_client` (httpx)

### Data Update Coordinator
- **Standard Pattern**: Use for efficient data management
  ```python
  class MyCoordinator(DataUpdateCoordinator):
      def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
          super().__init__(
              hass,
              logger=LOGGER,
              name=DOMAIN,
              update_interval=timedelta(minutes=5),
              config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
          )
          self.client = client

      async def _async_update_data(self):
          try:
              return await self.client.fetch_data()
          except ApiError as err:
              raise UpdateFailed(f"API communication error: {err}")
  ```
- **Error Types**: Use `UpdateFailed` for API errors, `ConfigEntryAuthFailed` for auth issues
- **Config Entry**: Always pass `config_entry` parameter to coordinator - it's accepted and recommended

## Integration Guidelines

### Configuration Flow
- **UI Setup Required**: All integrations must support configuration via UI
- **Manifest**: Set `"config_flow": true` in `manifest.json`
- **Data Storage**:
  - Connection-critical config: Store in `ConfigEntry.data`
  - Non-critical settings: Store in `ConfigEntry.options`
- **Validation**: Always validate user input before creating entries
- **Config Entry Naming**:
  - ❌ Do NOT allow users to set config entry names in config flows
  - Names are automatically generated or can be customized later in UI
  - ✅ Exception: Helper integrations MAY allow custom names in config flow
- **Connection Testing**: Test device/service connection during config flow:
  ```python
  try:
      await client.get_data()
  except MyException:
      errors["base"] = "cannot_connect"
  ```
- **Duplicate Prevention**: Prevent duplicate configurations:
  ```python
  # Using unique ID
  await self.async_set_unique_id(identifier)
  self._abort_if_unique_id_configured()

  # Using unique data
  self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
  ```

### Reauthentication Support
- **Required Method**: Implement `async_step_reauth` in config flow
- **Credential Updates**: Allow users to update credentials without re-adding
- **Validation**: Verify account matches existing unique ID:
  ```python
  await self.async_set_unique_id(user_id)
  self._abort_if_unique_id_mismatch(reason="wrong_account")
  return self.async_update_reload_and_abort(
      self._get_reauth_entry(),
      data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]}
  )
  ```

### Reconfiguration Flow
- **Purpose**: Allow configuration updates without removing device
- **Implementation**: Add `async_step_reconfigure` method
- **Validation**: Prevent changing underlying account with `_abort_if_unique_id_mismatch`

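A short sketch of such a reconfigure step, reusing the placeholder client and constants from the snippets above (`_get_reconfigure_entry` mirrors the `_get_reauth_entry` helper shown earlier; error handling is omitted for brevity):

```python
async def async_step_reconfigure(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Allow the user to update the host of an existing entry."""
    if user_input is not None:
        # Confirm the new host still points at the same device
        device = await MyClient(user_input[CONF_HOST]).get_device_info()
        await self.async_set_unique_id(device.serial)
        self._abort_if_unique_id_mismatch(reason="wrong_device")
        return self.async_update_reload_and_abort(
            self._get_reconfigure_entry(),
            data_updates={CONF_HOST: user_input[CONF_HOST]},
        )

    return self.async_show_form(
        step_id="reconfigure",
        data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
    )
```
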
### Device Discovery
- **Manifest Configuration**: Add discovery method (zeroconf, dhcp, etc.)
  ```json
  {
    "zeroconf": ["_mydevice._tcp.local."]
  }
  ```
- **Discovery Handler**: Implement appropriate `async_step_*` method:
  ```python
  async def async_step_zeroconf(self, discovery_info):
      """Handle zeroconf discovery."""
      await self.async_set_unique_id(discovery_info.properties["serialno"])
      self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})
  ```
- **Network Updates**: Use discovery to update dynamic IP addresses

### Network Discovery Implementation
- **Zeroconf/mDNS**: Use async instances
  ```python
  aiozc = await zeroconf.async_get_async_instance(hass)
  ```
- **SSDP Discovery**: Register callbacks with cleanup
  ```python
  entry.async_on_unload(
      ssdp.async_register_callback(
          hass, _async_discovered_device,
          {"st": "urn:schemas-upnp-org:device:ZonePlayer:1"}
      )
  )
  ```

### Bluetooth Integration
- **Manifest Dependencies**: Add `bluetooth_adapters` to dependencies
- **Connectable**: Set `"connectable": true` for connection-required devices
- **Scanner Usage**: Always use shared scanner instance
  ```python
  scanner = bluetooth.async_get_scanner()
  entry.async_on_unload(
      bluetooth.async_register_callback(
          hass, _async_discovered_device,
          {"service_uuid": "example_uuid"},
          bluetooth.BluetoothScanningMode.ACTIVE
      )
  )
  ```
- **Connection Handling**: Never reuse `BleakClient` instances, use 10+ second timeouts

### Setup Validation
- **Test Before Setup**: Verify integration can be set up in `async_setup_entry`
- **Exception Handling**:
  - `ConfigEntryNotReady`: Device offline or temporary failure
  - `ConfigEntryAuthFailed`: Authentication issues
  - `ConfigEntryError`: Unresolvable setup problems

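A sketch of how these exceptions are typically raised during setup (the client class and its exceptions are placeholders; the pattern, not the names, is the point):

```python
async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    """Set up the integration from a config entry."""
    client = MyClient(entry.data[CONF_HOST], async_get_clientsession(hass))

    try:
        await client.connect()
    except MyAuthError as err:
        # Triggers the reauthentication flow
        raise ConfigEntryAuthFailed("Invalid credentials") from err
    except MyConnectionError as err:
        # Home Assistant will retry setup later
        raise ConfigEntryNotReady(f"Device at {entry.data[CONF_HOST]} is not reachable") from err

    entry.runtime_data = client
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
```
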
### Config Entry Unloading
- **Required**: Implement `async_unload_entry` for runtime removal/reload
- **Platform Unloading**: Use `hass.config_entries.async_unload_platforms`
- **Cleanup**: Register callbacks with `entry.async_on_unload`:
  ```python
  async def async_unload_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
      """Unload a config entry."""
      if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
          entry.runtime_data.listener()  # Clean up resources
      return unload_ok
  ```

### Service Actions
- **Registration**: Register all service actions in `async_setup`, NOT in `async_setup_entry`
- **Validation**: Check config entry existence and loaded state:
  ```python
  async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
      async def service_action(call: ServiceCall) -> ServiceResponse:
          if not (entry := hass.config_entries.async_get_entry(call.data[ATTR_CONFIG_ENTRY_ID])):
              raise ServiceValidationError("Entry not found")
          if entry.state is not ConfigEntryState.LOADED:
              raise ServiceValidationError("Entry not loaded")
  ```
- **Exception Handling**: Raise appropriate exceptions:
  ```python
  # For invalid input
  if end_date < start_date:
      raise ServiceValidationError("End date must be after start date")

  # For service errors
  try:
      await client.set_schedule(start_date, end_date)
  except MyConnectionError as err:
      raise HomeAssistantError("Could not connect to the schedule") from err
  ```

### Service Registration Patterns
- **Entity Services**: Register on platform setup
  ```python
  platform.async_register_entity_service(
      "my_entity_service",
      {vol.Required("parameter"): cv.string},
      "handle_service_method"
  )
  ```
- **Service Schema**: Always validate input
  ```python
  SERVICE_SCHEMA = vol.Schema({
      vol.Required("entity_id"): cv.entity_ids,
      vol.Required("parameter"): cv.string,
      vol.Optional("timeout", default=30): cv.positive_int,
  })
  ```
- **Services File**: Create `services.yaml` with descriptions and field definitions

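A minimal `services.yaml` sketch for the entity service registered above (field names are placeholders; depending on the core version, the user-facing names and descriptions may live in `strings.json` instead):

```yaml
my_entity_service:
  target:
    entity:
      domain: sensor
  fields:
    parameter:
      required: true
      example: "some value"
      selector:
        text:
    timeout:
      default: 30
      selector:
        number:
          min: 1
          max: 300
          unit_of_measurement: seconds
```
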
### Polling
- Use update coordinator pattern when possible
- **Polling intervals are NOT user-configurable**: Never add scan_interval, update_interval, or polling frequency options to config flows or config entries
- **Integration determines intervals**: Set `update_interval` programmatically based on integration logic, not user input
- **Minimum Intervals**:
  - Local network: 5 seconds
  - Cloud services: 60 seconds
- **Parallel Updates**: Specify number of concurrent updates:
  ```python
  PARALLEL_UPDATES = 1  # Serialize updates to prevent overwhelming device
  # OR
  PARALLEL_UPDATES = 0  # Unlimited (for coordinator-based or read-only)
  ```

## Entity Development

### Unique IDs
- **Required**: Every entity must have a unique ID for registry tracking
- Must be unique per platform (not per integration)
- Don't include integration domain or platform in ID
- **Implementation**:
  ```python
  class MySensor(SensorEntity):
      def __init__(self, device_id: str) -> None:
          self._attr_unique_id = f"{device_id}_temperature"
  ```

**Acceptable ID Sources**:
- Device serial numbers
- MAC addresses (formatted using `format_mac` from device registry)
- Physical identifiers (printed/EEPROM)
- Config entry ID as last resort: `f"{entry.entry_id}-battery"`

**Never Use**:
- IP addresses, hostnames, URLs
- Device names
- Email addresses, usernames

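As a sketch of the MAC-address case (the discovery object and its attribute are placeholders for whatever provides the address):

```python
from homeassistant.helpers.device_registry import format_mac

# format_mac normalizes different notations to "aa:bb:cc:dd:ee:ff"
mac = format_mac(discovery_info.macaddress)
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
```
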
### Entity Descriptions
- **Lambda/Anonymous Functions**: Often used in EntityDescription for value transformation
- **Multiline Lambdas**: When lambdas exceed line length, wrap in parentheses for readability
- **Bad pattern**:
  ```python
  SensorEntityDescription(
      key="temperature",
      name="Temperature",
      value_fn=lambda data: round(data["temp_value"] * 1.8 + 32, 1) if data.get("temp_value") is not None else None,  # ❌ Too long
  )
  ```
- **Good pattern**:
  ```python
  SensorEntityDescription(
      key="temperature",
      name="Temperature",
      value_fn=lambda data: (  # ✅ Parenthesis on same line as lambda
          round(data["temp_value"] * 1.8 + 32, 1)
          if data.get("temp_value") is not None
          else None
      ),
  )
  ```

### Entity Naming
- **Use has_entity_name**: Set `_attr_has_entity_name = True`
- **For specific fields**:
  ```python
  class MySensor(SensorEntity):
      _attr_has_entity_name = True

      def __init__(self, device: Device, field: str) -> None:
          self._attr_device_info = DeviceInfo(
              identifiers={(DOMAIN, device.id)},
              name=device.name,
          )
          self._attr_name = field  # e.g., "temperature", "humidity"
  ```
- **For device itself**: Set `_attr_name = None`

### Event Lifecycle Management
- **Subscribe in `async_added_to_hass`**:
  ```python
  async def async_added_to_hass(self) -> None:
      """Subscribe to events."""
      self.async_on_remove(
          self.client.events.subscribe("my_event", self._handle_event)
      )
  ```
- **Unsubscribe in `async_will_remove_from_hass`** if not using `async_on_remove`
- Never subscribe in `__init__` or other methods

### State Handling
- Unknown values: Use `None` (not "unknown" or "unavailable")
- Availability: Implement `available()` property instead of using "unavailable" state

### Entity Availability
- **Mark Unavailable**: When data cannot be fetched from device/service
- **Coordinator Pattern**:
  ```python
  @property
  def available(self) -> bool:
      """Return if entity is available."""
      return super().available and self.identifier in self.coordinator.data
  ```
- **Direct Update Pattern**:
  ```python
  async def async_update(self) -> None:
      """Update entity."""
      try:
          data = await self.client.get_data()
      except MyException:
          self._attr_available = False
      else:
          self._attr_available = True
          self._attr_native_value = data.value
  ```

### Extra State Attributes
- All attribute keys must always be present
- Unknown values: Use `None`
- Provide descriptive attributes

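A small sketch of an attributes property following these rules (the attribute names and data fields are placeholders):

```python
@property
def extra_state_attributes(self) -> dict[str, Any]:
    """Return extra attributes; every key is always present."""
    reading = self.coordinator.data.get(self.identifier)
    return {
        "last_reported": reading.reported_at if reading else None,
        "raw_value": reading.raw_value if reading else None,
    }
```
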
## Device Management

### Device Registry
- **Create Devices**: Group related entities under devices
- **Device Info**: Provide comprehensive metadata:
  ```python
  _attr_device_info = DeviceInfo(
      connections={(CONNECTION_NETWORK_MAC, device.mac)},
      identifiers={(DOMAIN, device.id)},
      name=device.name,
      manufacturer="My Company",
      model="My Sensor",
      sw_version=device.version,
  )
  ```
- For services: Add `entry_type=DeviceEntryType.SERVICE`

### Dynamic Device Addition
- **Auto-detect New Devices**: After initial setup
- **Implementation Pattern**:
  ```python
  def _check_device() -> None:
      current_devices = set(coordinator.data)
      new_devices = current_devices - known_devices
      if new_devices:
          known_devices.update(new_devices)
          async_add_entities([MySensor(coordinator, device_id) for device_id in new_devices])

  entry.async_on_unload(coordinator.async_add_listener(_check_device))
  ```

### Stale Device Removal
- **Auto-remove**: When devices disappear from hub/account
- **Device Registry Update**:
  ```python
  device_registry.async_update_device(
      device_id=device.id,
      remove_config_entry_id=self.config_entry.entry_id,
  )
  ```
- **Manual Deletion**: Implement `async_remove_config_entry_device` when needed

### Entity Categories
- **Required**: Assign appropriate category to entities
- **Implementation**: Set `_attr_entity_category`
  ```python
  class MySensor(SensorEntity):
      _attr_entity_category = EntityCategory.DIAGNOSTIC
  ```
- Categories include: `DIAGNOSTIC` for system/technical information

### Device Classes
- **Use When Available**: Set appropriate device class for entity type
  ```python
  class MyTemperatureSensor(SensorEntity):
      _attr_device_class = SensorDeviceClass.TEMPERATURE
  ```
- Provides context for: unit conversion, voice control, UI representation

### Disabled by Default
- **Disable Noisy/Less Popular Entities**: Reduce resource usage
  ```python
  class MySignalStrengthSensor(SensorEntity):
      _attr_entity_registry_enabled_default = False
  ```
- Target: frequently changing states, technical diagnostics

### Entity Translations
- **Required with has_entity_name**: Support international users
- **Implementation**:
  ```python
  class MySensor(SensorEntity):
      _attr_has_entity_name = True
      _attr_translation_key = "phase_voltage"
  ```
- Create `strings.json` with translations:
  ```json
  {
    "entity": {
      "sensor": {
        "phase_voltage": {
          "name": "Phase voltage"
        }
      }
    }
  }
  ```

### Exception Translations (Gold)
- **Translatable Errors**: Use translation keys for user-facing exceptions
- **Implementation**:
  ```python
  raise ServiceValidationError(
      translation_domain=DOMAIN,
      translation_key="end_date_before_start_date",
  )
  ```
- Add to `strings.json`:
  ```json
  {
    "exceptions": {
      "end_date_before_start_date": {
        "message": "The end date cannot be before the start date."
      }
    }
  }
  ```

### Icon Translations (Gold)
- **Dynamic Icons**: Support state and range-based icon selection
- **State-based Icons**:
  ```json
  {
    "entity": {
      "sensor": {
        "tree_pollen": {
          "default": "mdi:tree",
          "state": {
            "high": "mdi:tree-outline"
          }
        }
      }
    }
  }
  ```
- **Range-based Icons** (for numeric values):
  ```json
  {
    "entity": {
      "sensor": {
        "battery_level": {
          "default": "mdi:battery-unknown",
          "range": {
            "0": "mdi:battery-outline",
            "90": "mdi:battery-90",
            "100": "mdi:battery"
          }
        }
      }
    }
  }
  ```

## Testing Requirements

- **Location**: `tests/components/{domain}/`
- **Coverage Requirement**: Above 95% test coverage for all modules
- **Best Practices**:
  - Use pytest fixtures from `tests.common`
  - Mock all external dependencies
  - Use snapshots for complex data structures
  - Follow existing test patterns

### Config Flow Testing
- **100% Coverage Required**: All config flow paths must be tested
- **Patch Boundaries**: Only patch library or client methods when testing config flows. Do not patch methods defined in `config_flow.py`; exercise the flow logic end-to-end.
- **Test Scenarios**:
  - All flow initiation methods (user, discovery, import)
  - Successful configuration paths
  - Error recovery scenarios
  - Prevention of duplicate entries
  - Flow completion after errors
  - Reauthentication/reconfigure flows

### Testing
- **Integration-specific tests** (recommended):
  ```bash
  pytest ./tests/components/<integration_domain> \
    --cov=homeassistant.components.<integration_domain> \
    --cov-report term-missing \
    --durations-min=1 \
    --durations=0 \
    --numprocesses=auto
  ```

### Testing Best Practices
- **Never access `hass.data` directly** - Use fixtures and proper integration setup instead
- **Use snapshot testing** - For verifying entity states and attributes
- **Test through integration setup** - Don't test entities in isolation
- **Mock external APIs** - Use fixtures with realistic JSON data
- **Verify registries** - Ensure entities are properly registered with devices

### Config Flow Testing Template
```python
async def test_user_flow_success(hass, mock_api):
    """Test successful user flow."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == FlowResultType.FORM
    assert result["step_id"] == "user"

    # Test form submission
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=TEST_USER_INPUT
    )
    assert result["type"] == FlowResultType.CREATE_ENTRY
    assert result["title"] == "My Device"
    assert result["data"] == TEST_USER_INPUT


async def test_flow_connection_error(hass, mock_api_error):
    """Test connection error handling."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=TEST_USER_INPUT
    )
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {"base": "cannot_connect"}
```

### Entity Testing Patterns
```python
@pytest.fixture
def platforms() -> list[Platform]:
    """Overridden fixture to specify platforms to test."""
    return [Platform.SENSOR]  # Or another specific platform as needed.


@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_entities(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    device_registry: dr.DeviceRegistry,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test the sensor entities."""
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)

    # Ensure entities are correctly assigned to device
    device_entry = device_registry.async_get_device(
        identifiers={(DOMAIN, "device_unique_id")}
    )
    assert device_entry
    entity_entries = er.async_entries_for_config_entry(
        entity_registry, mock_config_entry.entry_id
    )
    for entity_entry in entity_entries:
        assert entity_entry.device_id == device_entry.id
```

### Mock Patterns
```python
# Modern integration fixture setup
@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
    """Return the default mocked config entry."""
    return MockConfigEntry(
        title="My Integration",
        domain=DOMAIN,
        data={CONF_HOST: "127.0.0.1", CONF_API_KEY: "test_key"},
        unique_id="device_unique_id",
    )


@pytest.fixture
def mock_device_api() -> Generator[MagicMock]:
    """Return a mocked device API."""
    with patch("homeassistant.components.my_integration.MyDeviceAPI", autospec=True) as api_mock:
        api = api_mock.return_value
        api.get_data.return_value = MyDeviceData.from_json(
            load_fixture("device_data.json", DOMAIN)
        )
        yield api


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return PLATFORMS


@pytest.fixture
async def init_integration(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_device_api: MagicMock,
    platforms: list[Platform],
) -> MockConfigEntry:
    """Set up the integration for testing."""
    mock_config_entry.add_to_hass(hass)

    with patch("homeassistant.components.my_integration.PLATFORMS", platforms):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()

    return mock_config_entry
```

## Debugging & Troubleshooting

### Common Issues & Solutions
- **Integration won't load**: Check `manifest.json` syntax and required fields
- **Entities not appearing**: Verify `unique_id` and `has_entity_name` implementation
- **Config flow errors**: Check `strings.json` entries and error handling
- **Discovery not working**: Verify manifest discovery configuration and callbacks
- **Tests failing**: Check mock setup and async context

### Debug Logging Setup
```python
# Enable debug logging in tests
caplog.set_level(logging.DEBUG, logger="my_integration")

# In integration code - use proper logging
_LOGGER = logging.getLogger(__name__)
_LOGGER.debug("Processing data: %s", data)  # Use lazy logging
```

### Validation Commands
```bash
# Check specific integration
python -m script.hassfest --integration-path homeassistant/components/my_integration

# Validate quality scale
# Check quality_scale.yaml against current rules

# Run integration tests with coverage
pytest ./tests/components/my_integration \
  --cov=homeassistant.components.my_integration \
  --cov-report term-missing
```
- Tests should avoid interacting with or mocking internal integration details. For more info, see https://developers.home-assistant.io/docs/development_testing/#writing-tests-for-integrations

@@ -3,17 +3,4 @@
Platform exists as `homeassistant/components/<domain>/diagnostics.py`.

- **Required**: Implement diagnostic data collection
- **Implementation**:
  ```python
  TO_REDACT = [CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE]

  async def async_get_config_entry_diagnostics(
      hass: HomeAssistant, entry: MyConfigEntry
  ) -> dict[str, Any]:
      """Return diagnostics for a config entry."""
      return {
          "entry_data": async_redact_data(entry.data, TO_REDACT),
          "data": entry.runtime_data.data,
      }
  ```
- **Security**: Never expose passwords, tokens, or sensitive coordinates

@@ -8,29 +8,6 @@ Platform exists as `homeassistant/components/<domain>/repairs.py`.
- Provide specific steps users need to take to resolve the issue
- Use friendly, helpful language
- Include relevant context (device names, error details, etc.)
- **Implementation**:
  ```python
  ir.async_create_issue(
      hass,
      DOMAIN,
      "outdated_version",
      is_fixable=False,
      issue_domain=DOMAIN,
      severity=ir.IssueSeverity.ERROR,
      translation_key="outdated_version",
  )
  ```
- **Translation Strings Requirements**: Must contain user-actionable text in `strings.json`:
  ```json
  {
    "issues": {
      "outdated_version": {
        "title": "Device firmware is outdated",
        "description": "Your device firmware version {current_version} is below the minimum required version {min_version}. To fix this issue: 1) Open the manufacturer's mobile app, 2) Navigate to device settings, 3) Select 'Update Firmware', 4) Wait for the update to complete, then 5) Restart Home Assistant."
      }
    }
  }
  ```
- **String Content Must Include**:
  - What the problem is
  - Why it matters
@@ -41,15 +18,4 @@ Platform exists as `homeassistant/components/<domain>/repairs.py`.
- `CRITICAL`: Reserved for extreme scenarios only
- `ERROR`: Requires immediate user attention
- `WARNING`: Indicates future potential breakage
- **Additional Attributes**:
  ```python
  ir.async_create_issue(
      hass, DOMAIN, "issue_id",
      breaks_in_ha_version="2024.1.0",
      is_fixable=True,
      is_persistent=True,
      severity=ir.IssueSeverity.ERROR,
      translation_key="issue_description",
  )
  ```
- Only create issues for problems users can potentially resolve

.github/workflows/builder.yml (vendored, 6 changed lines)
@@ -49,7 +49,7 @@ jobs:

- name: Get information
id: info
uses: home-assistant/actions/helpers/info@5f5b077d63a1e4c53019231409a0c4d791fb74e5 # zizmor: ignore[unpinned-uses]
uses: home-assistant/actions/helpers/info@master # zizmor: ignore[unpinned-uses]

- name: Get version
id: version
@@ -112,7 +112,7 @@ jobs:

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@2536c51d3d126276eb39f74d6bc9c72ac6ef30d3 # v16
uses: dawidd6/action-download-artifact@8a338493df3d275e4a7a63bcff3b8fe97e51a927 # v19
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend
@@ -123,7 +123,7 @@ jobs:

- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@2536c51d3d126276eb39f74d6bc9c72ac6ef30d3 # v16
uses: dawidd6/action-download-artifact@8a338493df3d275e4a7a63bcff3b8fe97e51a927 # v19
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: OHF-Voice/intents-package

.github/workflows/ci.yaml (vendored, 56 changed lines)
@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 3
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.4"
HA_SHORT_VERSION: "2026.5"
ADDITIONAL_PYTHON_VERSIONS: "[]"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
@@ -280,7 +280,7 @@ jobs:
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
echo "::add-matcher::.github/workflows/matchers/codespell.json"
- name: Run prek
uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
uses: j178/prek-action@53276d8b0d10f8b6672aa85b4588c6921d0370cc # v2.0.1
env:
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config,zizmor
RUFF_OUTPUT_FORMAT: github
@@ -301,7 +301,7 @@ jobs:
with:
persist-credentials: false
- name: Run zizmor
uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
uses: j178/prek-action@53276d8b0d10f8b6672aa85b4588c6921d0370cc # v2.0.1
with:
extra-args: --all-files zizmor

@@ -364,7 +364,7 @@ jobs:
echo "key=uv-${UV_CACHE_VERSION}-${uv_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
key: >-
@@ -372,7 +372,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -384,7 +384,7 @@ jobs:
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
@@ -430,7 +430,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -484,7 +484,7 @@ jobs:
&& github.event.inputs.audit-licenses-only != 'true'
steps:
- name: Restore apt cache
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -515,7 +515,7 @@ jobs:
check-latest: true
- name: Restore full Python virtual environment
id: cache-venv
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: venv
fail-on-cache-miss: true
@@ -552,7 +552,7 @@ jobs:
check-latest: true
|
||||
- name: Restore full Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -643,7 +643,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -694,7 +694,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -747,7 +747,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -804,7 +804,7 @@ jobs:
|
||||
echo "key=mypy-${MYPY_CACHE_VERSION}-${mypy_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore full Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -812,7 +812,7 @@ jobs:
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore mypy cache
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: .mypy_cache
|
||||
key: >-
|
||||
@@ -854,7 +854,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -887,7 +887,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -930,7 +930,7 @@ jobs:
|
||||
group: ${{ fromJson(needs.info.outputs.test_groups) }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -964,7 +964,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1080,7 +1080,7 @@ jobs:
|
||||
mariadb-group: ${{ fromJson(needs.info.outputs.mariadb_groups) }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1115,7 +1115,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1238,7 +1238,7 @@ jobs:
|
||||
postgresql-group: ${{ fromJson(needs.info.outputs.postgresql_groups) }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1275,7 +1275,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1392,7 +1392,7 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5.5.3
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
flags: full-suite
|
||||
@@ -1421,7 +1421,7 @@ jobs:
|
||||
group: ${{ fromJson(needs.info.outputs.test_groups) }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1455,7 +1455,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1563,7 +1563,7 @@ jobs:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5.5.3
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }} # zizmor: ignore[secrets-outside-env]
|
||||
@@ -1591,7 +1591,7 @@ jobs:
|
||||
with:
|
||||
pattern: test-results-*
|
||||
- name: Upload test results to Codecov
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5.5.3
|
||||
with:
|
||||
report_type: test_results
|
||||
fail_ci_if_error: true
|
||||
|
||||
@@ -174,6 +174,7 @@ homeassistant.components.dnsip.*
homeassistant.components.doorbird.*
homeassistant.components.dormakaba_dkey.*
homeassistant.components.downloader.*
homeassistant.components.dropbox.*
homeassistant.components.droplet.*
homeassistant.components.dsmr.*
homeassistant.components.duckdns.*
@@ -578,6 +579,7 @@ homeassistant.components.trmnl.*
homeassistant.components.tts.*
homeassistant.components.twentemilieu.*
homeassistant.components.unifi.*
homeassistant.components.unifi_access.*
homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
20  CODEOWNERS  generated
@@ -222,8 +222,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/binary_sensor/ @home-assistant/core
/tests/components/binary_sensor/ @home-assistant/core
/homeassistant/components/bizkaibus/ @UgaitzEtxebarria
/homeassistant/components/blebox/ @bbx-a @swistakm
/tests/components/blebox/ @bbx-a @swistakm
/homeassistant/components/blebox/ @bbx-a @swistakm @bkobus-bbx
/tests/components/blebox/ @bbx-a @swistakm @bkobus-bbx
/homeassistant/components/blink/ @fronzbot
/tests/components/blink/ @fronzbot
/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
@@ -401,6 +401,8 @@ build.json @home-assistant/supervisor
/tests/components/dremel_3d_printer/ @tkdrob
/homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
/tests/components/drop_connect/ @ChandlerSystems @pfrazer
/homeassistant/components/dropbox/ @bdr99
/tests/components/dropbox/ @bdr99
/homeassistant/components/droplet/ @sarahseidman
/tests/components/droplet/ @sarahseidman
/homeassistant/components/dsmr/ @Robbie1221
@@ -739,8 +741,8 @@ build.json @home-assistant/supervisor
/tests/components/honeywell/ @rdfurman @mkmer
/homeassistant/components/hr_energy_qube/ @MattieGit
/tests/components/hr_energy_qube/ @MattieGit
/homeassistant/components/html5/ @alexyao2015
/tests/components/html5/ @alexyao2015
/homeassistant/components/html5/ @alexyao2015 @tr4nt0r
/tests/components/html5/ @alexyao2015 @tr4nt0r
/homeassistant/components/http/ @home-assistant/core
/tests/components/http/ @home-assistant/core
/homeassistant/components/huawei_lte/ @scop @fphammerle
@@ -1226,12 +1228,12 @@ build.json @home-assistant/supervisor
/tests/components/onewire/ @garbled1 @epenet
/homeassistant/components/onkyo/ @arturpragacz @eclair4151
/tests/components/onkyo/ @arturpragacz @eclair4151
/homeassistant/components/onvif/ @hunterjm @jterrace
/tests/components/onvif/ @hunterjm @jterrace
/homeassistant/components/onvif/ @jterrace
/tests/components/onvif/ @jterrace
/homeassistant/components/open_meteo/ @frenck
/tests/components/open_meteo/ @frenck
/homeassistant/components/open_router/ @joostlek
/tests/components/open_router/ @joostlek
/homeassistant/components/open_router/ @joostlek @ab3lson
/tests/components/open_router/ @joostlek @ab3lson
/homeassistant/components/opendisplay/ @g4bri3lDev
/tests/components/opendisplay/ @g4bri3lDev
/homeassistant/components/openerz/ @misialq
@@ -1299,6 +1301,8 @@ build.json @home-assistant/supervisor
/tests/components/pi_hole/ @shenxn
/homeassistant/components/picnic/ @corneyl @codesalatdev
/tests/components/picnic/ @corneyl @codesalatdev
/homeassistant/components/picotts/ @rooggiieerr
/tests/components/picotts/ @rooggiieerr
/homeassistant/components/ping/ @jpbede
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan
@@ -1 +1 @@
"""The actiontec component."""
"""The Actiontec integration."""
@@ -33,14 +33,21 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
from .coordinator import (
    AirOSConfigEntry,
    AirOSDataUpdateCoordinator,
    AirOSFirmwareUpdateCoordinator,
    AirOSRuntimeData,
)

_PLATFORMS: list[Platform] = [
    Platform.BINARY_SENSOR,
    Platform.BUTTON,
    Platform.SENSOR,
    Platform.UPDATE,
]

_LOGGER = logging.getLogger(__name__)

@@ -86,10 +93,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo

    airos_device = airos_class(**conn_data)

    coordinator = AirOSDataUpdateCoordinator(hass, entry, device_data, airos_device)
    await coordinator.async_config_entry_first_refresh()
    data_coordinator = AirOSDataUpdateCoordinator(
        hass, entry, device_data, airos_device
    )
    await data_coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator
    firmware_coordinator: AirOSFirmwareUpdateCoordinator | None = None
    if device_data["fw_major"] >= 8:
        firmware_coordinator = AirOSFirmwareUpdateCoordinator(hass, entry, airos_device)
        await firmware_coordinator.async_config_entry_first_refresh()

    entry.runtime_data = AirOSRuntimeData(
        status=data_coordinator,
        firmware=firmware_coordinator,
    )

    await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
@@ -87,7 +87,7 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS binary sensors from a config entry."""
    coordinator = config_entry.runtime_data
    coordinator = config_entry.runtime_data.status

    entities = [
        AirOSBinarySensor(coordinator, description)

@@ -31,7 +31,9 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS button from a config entry."""
    async_add_entities([AirOSRebootButton(config_entry.runtime_data, REBOOT_BUTTON)])
    async_add_entities(
        [AirOSRebootButton(config_entry.runtime_data.status, REBOOT_BUTTON)]
    )


class AirOSRebootButton(AirOSEntity, ButtonEntity):
@@ -5,6 +5,7 @@ from datetime import timedelta
DOMAIN = "airos"

SCAN_INTERVAL = timedelta(minutes=1)
UPDATE_SCAN_INTERVAL = timedelta(days=1)

MANUFACTURER = "Ubiquiti"
@@ -2,7 +2,10 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, TypeVar
|
||||
|
||||
from airos.airos6 import AirOS6, AirOS6Data
|
||||
from airos.airos8 import AirOS8, AirOS8Data
|
||||
@@ -19,20 +22,61 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN, SCAN_INTERVAL
|
||||
from .const import DOMAIN, SCAN_INTERVAL, UPDATE_SCAN_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
AirOSDeviceDetect = AirOS8 | AirOS6
|
||||
AirOSDataDetect = AirOS8Data | AirOS6Data
|
||||
type AirOSDeviceDetect = AirOS8 | AirOS6
|
||||
type AirOSDataDetect = AirOS8Data | AirOS6Data
|
||||
type AirOSUpdateData = dict[str, Any]
|
||||
|
||||
type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]
|
||||
type AirOSConfigEntry = ConfigEntry[AirOSRuntimeData]
|
||||
|
||||
T = TypeVar("T", bound=AirOSDataDetect | AirOSUpdateData)
|
||||
|
||||
|
||||
@dataclass
|
||||
class AirOSRuntimeData:
|
||||
"""Data for AirOS config entry."""
|
||||
|
||||
status: AirOSDataUpdateCoordinator
|
||||
firmware: AirOSFirmwareUpdateCoordinator | None
|
||||
|
||||
|
||||
async def async_fetch_airos_data(
|
||||
airos_device: AirOSDeviceDetect,
|
||||
update_method: Callable[[], Awaitable[T]],
|
||||
) -> T:
|
||||
"""Fetch data from AirOS device."""
|
||||
try:
|
||||
await airos_device.login()
|
||||
return await update_method()
|
||||
except AirOSConnectionAuthenticationError as err:
|
||||
_LOGGER.exception("Error authenticating with airOS device")
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN, translation_key="invalid_auth"
|
||||
) from err
|
||||
except (
|
||||
AirOSConnectionSetupError,
|
||||
AirOSDeviceConnectionError,
|
||||
TimeoutError,
|
||||
) as err:
|
||||
_LOGGER.error("Error connecting to airOS device: %s", err)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
) from err
|
||||
except AirOSDataMissingError as err:
|
||||
_LOGGER.error("Expected data not returned by airOS device: %s", err)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="error_data_missing",
|
||||
) from err
|
||||
|
||||
|
||||
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSDataDetect]):
|
||||
"""Class to manage fetching AirOS data from single endpoint."""
|
||||
"""Class to manage fetching AirOS status data from single endpoint."""
|
||||
|
||||
airos_device: AirOSDeviceDetect
|
||||
config_entry: AirOSConfigEntry
|
||||
|
||||
def __init__(
|
||||
@@ -54,28 +98,33 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSDataDetect]):
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirOSDataDetect:
|
||||
"""Fetch data from AirOS."""
|
||||
try:
|
||||
await self.airos_device.login()
|
||||
return await self.airos_device.status()
|
||||
except AirOSConnectionAuthenticationError as err:
|
||||
_LOGGER.exception("Error authenticating with airOS device")
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN, translation_key="invalid_auth"
|
||||
) from err
|
||||
except (
|
||||
AirOSConnectionSetupError,
|
||||
AirOSDeviceConnectionError,
|
||||
TimeoutError,
|
||||
) as err:
|
||||
_LOGGER.error("Error connecting to airOS device: %s", err)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
) from err
|
||||
except AirOSDataMissingError as err:
|
||||
_LOGGER.error("Expected data not returned by airOS device: %s", err)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="error_data_missing",
|
||||
) from err
|
||||
"""Fetch status data from AirOS."""
|
||||
return await async_fetch_airos_data(self.airos_device, self.airos_device.status)
|
||||
|
||||
|
||||
class AirOSFirmwareUpdateCoordinator(DataUpdateCoordinator[AirOSUpdateData]):
|
||||
"""Class to manage fetching AirOS firmware."""
|
||||
|
||||
config_entry: AirOSConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: AirOSConfigEntry,
|
||||
airos_device: AirOSDeviceDetect,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
self.airos_device = airos_device
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=DOMAIN,
|
||||
update_interval=UPDATE_SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirOSUpdateData:
|
||||
"""Fetch firmware data from AirOS."""
|
||||
return await async_fetch_airos_data(
|
||||
self.airos_device, self.airos_device.update_check
|
||||
)
|
||||
|
||||
@@ -29,5 +29,15 @@ async def async_get_config_entry_diagnostics(
    """Return diagnostics for a config entry."""
    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT_HA),
        "data": async_redact_data(entry.runtime_data.data.to_dict(), TO_REDACT_AIROS),
        "data": {
            "status_data": async_redact_data(
                entry.runtime_data.status.data.to_dict(), TO_REDACT_AIROS
            ),
            "firmware_data": async_redact_data(
                entry.runtime_data.firmware.data
                if entry.runtime_data.firmware is not None
                else {},
                TO_REDACT_AIROS,
            ),
        },
    }

@@ -180,7 +180,7 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the AirOS sensors from a config entry."""
    coordinator = config_entry.runtime_data
    coordinator = config_entry.runtime_data.status

    entities = [AirOSSensor(coordinator, description) for description in COMMON_SENSORS]
@@ -206,6 +206,12 @@
    },
    "reboot_failed": {
      "message": "The device did not accept the reboot request. Try again, or check your device web interface for errors."
    },
    "update_connection_authentication_error": {
      "message": "Authentication or connection failed during firmware update"
    },
    "update_error": {
      "message": "Connection failed during firmware update"
    }
  }
}
101  homeassistant/components/airos/update.py  Normal file
@@ -0,0 +1,101 @@
|
||||
"""AirOS update component for Home Assistant."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from airos.exceptions import AirOSConnectionAuthenticationError, AirOSException
|
||||
|
||||
from homeassistant.components.update import (
|
||||
UpdateDeviceClass,
|
||||
UpdateEntity,
|
||||
UpdateEntityFeature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import (
|
||||
AirOSConfigEntry,
|
||||
AirOSDataUpdateCoordinator,
|
||||
AirOSFirmwareUpdateCoordinator,
|
||||
)
|
||||
from .entity import AirOSEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AirOSConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the AirOS update entity from a config entry."""
|
||||
runtime_data = config_entry.runtime_data
|
||||
|
||||
if runtime_data.firmware is None: # Unsupported device
|
||||
return
|
||||
async_add_entities([AirOSUpdateEntity(runtime_data.status, runtime_data.firmware)])
|
||||
|
||||
|
||||
class AirOSUpdateEntity(AirOSEntity, UpdateEntity):
|
||||
"""Update entity for AirOS firmware updates."""
|
||||
|
||||
_attr_device_class = UpdateDeviceClass.FIRMWARE
|
||||
_attr_supported_features = UpdateEntityFeature.INSTALL
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
status: AirOSDataUpdateCoordinator,
|
||||
firmware: AirOSFirmwareUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the AirOS update entity."""
|
||||
super().__init__(status)
|
||||
self.status = status
|
||||
self.firmware = firmware
|
||||
|
||||
self._attr_unique_id = f"{status.data.derived.mac}_firmware_update"
|
||||
|
||||
@property
|
||||
def installed_version(self) -> str | None:
|
||||
"""Return the installed firmware version."""
|
||||
return self.status.data.host.fwversion
|
||||
|
||||
@property
|
||||
def latest_version(self) -> str | None:
|
||||
"""Return the latest firmware version."""
|
||||
if not self.firmware.data.get("update", False):
|
||||
return self.status.data.host.fwversion
|
||||
return self.firmware.data.get("version")
|
||||
|
||||
@property
|
||||
def release_url(self) -> str | None:
|
||||
"""Return the release url of the latest firmware."""
|
||||
return self.firmware.data.get("changelog")
|
||||
|
||||
async def async_install(
|
||||
self,
|
||||
version: str | None,
|
||||
backup: bool,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Handle the firmware update installation."""
|
||||
_LOGGER.debug("Starting firmware update")
|
||||
try:
|
||||
await self.status.airos_device.login()
|
||||
await self.status.airos_device.download()
|
||||
await self.status.airos_device.install()
|
||||
except AirOSConnectionAuthenticationError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_connection_authentication_error",
|
||||
) from err
|
||||
except AirOSException as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_error",
|
||||
) from err
|
||||
@@ -13,6 +13,9 @@ from homeassistant.helpers import (
|
||||
config_entry_oauth2_flow,
|
||||
device_registry as dr,
|
||||
)
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
ImplementationUnavailableError,
|
||||
)
|
||||
|
||||
from . import api
|
||||
from .const import CONFIG_FLOW_MINOR_VERSION, CONFIG_FLOW_VERSION, DOMAIN
|
||||
@@ -25,11 +28,17 @@ async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: AladdinConnectConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Aladdin Connect Genie from a config entry."""
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
try:
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
)
|
||||
)
|
||||
)
|
||||
except ImplementationUnavailableError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="oauth2_implementation_unavailable",
|
||||
) from err
|
||||
|
||||
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
|
||||
|
||||
|
||||
@@ -37,6 +37,9 @@
|
||||
"close_door_failed": {
|
||||
"message": "Failed to close the garage door"
|
||||
},
|
||||
"oauth2_implementation_unavailable": {
|
||||
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
|
||||
},
|
||||
"open_door_failed": {
|
||||
"message": "Failed to open the garage door"
|
||||
}
|
||||
|
||||
@@ -3,10 +3,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from adext import AdExt
|
||||
from alarmdecoder.devices import SerialDevice, SocketDevice
|
||||
from alarmdecoder.devices import Device, SerialDevice, SocketDevice
|
||||
from alarmdecoder.util import NoDeviceError
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -102,16 +102,21 @@ class AlarmDecoderFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self._async_current_entries(), user_input, self.protocol
|
||||
):
|
||||
return self.async_abort(reason="already_configured")
|
||||
connection = {}
|
||||
connection: dict[str, Any] = {}
|
||||
baud = None
|
||||
device: Device
|
||||
if self.protocol == PROTOCOL_SOCKET:
|
||||
host = connection[CONF_HOST] = user_input[CONF_HOST]
|
||||
port = connection[CONF_PORT] = user_input[CONF_PORT]
|
||||
title = f"{host}:{port}"
|
||||
host = connection[CONF_HOST] = cast(str, user_input[CONF_HOST])
|
||||
port = connection[CONF_PORT] = cast(int, user_input[CONF_PORT])
|
||||
title: str = f"{host}:{port}"
|
||||
device = SocketDevice(interface=(host, port))
|
||||
if self.protocol == PROTOCOL_SERIAL:
|
||||
path = connection[CONF_DEVICE_PATH] = user_input[CONF_DEVICE_PATH]
|
||||
baud = connection[CONF_DEVICE_BAUD] = user_input[CONF_DEVICE_BAUD]
|
||||
path = connection[CONF_DEVICE_PATH] = cast(
|
||||
str, user_input[CONF_DEVICE_PATH]
|
||||
)
|
||||
baud = connection[CONF_DEVICE_BAUD] = cast(
|
||||
int, user_input[CONF_DEVICE_BAUD]
|
||||
)
|
||||
title = path
|
||||
device = SerialDevice(interface=path)
|
||||
|
||||
@@ -132,6 +137,7 @@ class AlarmDecoderFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.exception("Unexpected exception during AlarmDecoder setup")
|
||||
errors["base"] = "unknown"
|
||||
|
||||
schema: vol.Schema
|
||||
if self.protocol == PROTOCOL_SOCKET:
|
||||
schema = vol.Schema(
|
||||
{
|
||||
|
||||
@@ -54,16 +54,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
            entry.data[CONF_PASSWORD],
            entry.data[CONF_LOGIN_DATA],
        )
        device_registry = dr.async_get(hass)
        self.previous_devices: set[str] = {
            identifier
            for device in device_registry.devices.get_devices_for_config_entry_id(
                entry.entry_id
            )
            if device.entry_type != dr.DeviceEntryType.SERVICE
            for identifier_domain, identifier in device.identifiers
            if identifier_domain == DOMAIN
        }
        self.previous_devices: set[str] = set()

    async def _async_update_data(self) -> dict[str, AmazonDevice]:
        """Update device data."""
@@ -92,7 +92,6 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
_LOGGER.debug("Updating statistics for the first time")
|
||||
usage_sum = 0.0
|
||||
last_stats_time = None
|
||||
allow_update_last_stored_hour = False
|
||||
else:
|
||||
if not meter.readings or len(meter.readings) == 0:
|
||||
_LOGGER.debug("No recent usage statistics found, skipping update")
|
||||
@@ -108,7 +107,6 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
continue
|
||||
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
|
||||
_LOGGER.debug("Getting statistics at %s", start)
|
||||
stats: dict[str, list[Any]] = {}
|
||||
for end in (start + timedelta(seconds=1), None):
|
||||
stats = await get_instance(self.hass).async_add_executor_job(
|
||||
statistics_during_period,
|
||||
@@ -129,28 +127,15 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
"Not found, trying to find oldest statistic after %s",
|
||||
start,
|
||||
)
|
||||
assert stats
|
||||
|
||||
if not stats or not stats.get(usage_statistic_id):
|
||||
_LOGGER.debug(
|
||||
"Could not find existing statistics during period lookup for %s, "
|
||||
"falling back to last stored statistic",
|
||||
usage_statistic_id,
|
||||
)
|
||||
allow_update_last_stored_hour = True
|
||||
last_records = last_stat[usage_statistic_id]
|
||||
usage_sum = float(last_records[0].get("sum") or 0.0)
|
||||
last_stats_time = last_records[0]["start"]
|
||||
else:
|
||||
allow_update_last_stored_hour = False
|
||||
records = stats[usage_statistic_id]
|
||||
def _safe_get_sum(records: list[Any]) -> float:
|
||||
if records and "sum" in records[0]:
|
||||
return float(records[0]["sum"])
|
||||
return 0.0
|
||||
|
||||
def _safe_get_sum(records: list[Any]) -> float:
|
||||
if records and "sum" in records[0]:
|
||||
return float(records[0]["sum"])
|
||||
return 0.0
|
||||
|
||||
usage_sum = _safe_get_sum(records)
|
||||
last_stats_time = records[0]["start"]
|
||||
usage_sum = _safe_get_sum(stats.get(usage_statistic_id, []))
|
||||
last_stats_time = stats[usage_statistic_id][0]["start"]
|
||||
|
||||
usage_statistics = []
|
||||
|
||||
@@ -163,13 +148,7 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
)
|
||||
continue
|
||||
start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
|
||||
if last_stats_time is not None and (
|
||||
start.timestamp() < last_stats_time
|
||||
or (
|
||||
start.timestamp() == last_stats_time
|
||||
and not allow_update_last_stored_hour
|
||||
)
|
||||
):
|
||||
if last_stats_time is not None and start.timestamp() <= last_stats_time:
|
||||
continue
|
||||
usage_state = max(0, read["consumption"] / 1000)
|
||||
usage_sum = max(0, read["read"])
|
||||
|
||||
@@ -2,19 +2,15 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import anthropic
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
@@ -24,12 +20,11 @@ from .const import (
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
from .coordinator import AnthropicConfigEntry, AnthropicCoordinator
|
||||
|
||||
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Anthropic."""
|
||||
@@ -39,17 +34,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
|
||||
"""Set up Anthropic from a config entry."""
|
||||
client = anthropic.AsyncAnthropic(
|
||||
api_key=entry.data[CONF_API_KEY], http_client=get_async_client(hass)
|
||||
)
|
||||
try:
|
||||
await client.models.list(timeout=10.0)
|
||||
except anthropic.AuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed(err) from err
|
||||
except anthropic.AnthropicError as err:
|
||||
raise ConfigEntryNotReady(err) from err
|
||||
|
||||
entry.runtime_data = client
|
||||
coordinator = AnthropicCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.json import json_loads
|
||||
|
||||
from .const import DOMAIN
|
||||
from .entity import AnthropicBaseLLMEntity
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -60,7 +61,7 @@ class AnthropicTaskEntity(
|
||||
|
||||
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
|
||||
raise HomeAssistantError(
|
||||
"Last content in chat log is not an AssistantContent"
|
||||
translation_domain=DOMAIN, translation_key="response_not_found"
|
||||
)
|
||||
|
||||
text = chat_log.content[-1].content or ""
|
||||
@@ -78,7 +79,9 @@ class AnthropicTaskEntity(
|
||||
err,
|
||||
text,
|
||||
)
|
||||
raise HomeAssistantError("Error with Claude structured response") from err
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="json_parse_error"
|
||||
) from err
|
||||
|
||||
return ai_task.GenDataTaskResult(
|
||||
conversation_id=chat_log.conversation_id,
|
||||
|
||||
@@ -71,6 +71,16 @@ CODE_EXECUTION_UNSUPPORTED_MODELS = [
|
||||
"claude-3-haiku",
|
||||
]
|
||||
|
||||
PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS = [
|
||||
"claude-haiku-4-5",
|
||||
"claude-opus-4-1",
|
||||
"claude-opus-4-0",
|
||||
"claude-opus-4-20250514",
|
||||
"claude-sonnet-4-0",
|
||||
"claude-sonnet-4-20250514",
|
||||
"claude-3-haiku",
|
||||
]
|
||||
|
||||
DEPRECATED_MODELS = [
|
||||
"claude-3",
|
||||
]
|
||||
|
||||
78  homeassistant/components/anthropic/coordinator.py  Normal file
@@ -0,0 +1,78 @@
|
||||
"""Coordinator for the Anthropic integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
import anthropic
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
UPDATE_INTERVAL_CONNECTED = timedelta(hours=12)
|
||||
UPDATE_INTERVAL_DISCONNECTED = timedelta(minutes=1)
|
||||
|
||||
type AnthropicConfigEntry = ConfigEntry[AnthropicCoordinator]
|
||||
|
||||
|
||||
class AnthropicCoordinator(DataUpdateCoordinator[None]):
|
||||
"""DataUpdateCoordinator which uses different intervals after successful and unsuccessful updates."""
|
||||
|
||||
client: anthropic.AsyncAnthropic
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: AnthropicConfigEntry) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=config_entry.title,
|
||||
update_interval=UPDATE_INTERVAL_CONNECTED,
|
||||
update_method=self.async_update_data,
|
||||
always_update=False,
|
||||
)
|
||||
self.client = anthropic.AsyncAnthropic(
|
||||
api_key=config_entry.data[CONF_API_KEY], http_client=get_async_client(hass)
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_set_updated_data(self, data: None) -> None:
|
||||
"""Manually update data, notify listeners and update refresh interval."""
|
||||
self.update_interval = UPDATE_INTERVAL_CONNECTED
|
||||
super().async_set_updated_data(data)
|
||||
|
||||
async def async_update_data(self) -> None:
|
||||
"""Fetch data from the API."""
|
||||
try:
|
||||
self.update_interval = UPDATE_INTERVAL_DISCONNECTED
|
||||
await self.client.models.list(timeout=10.0)
|
||||
self.update_interval = UPDATE_INTERVAL_CONNECTED
|
||||
except anthropic.APITimeoutError as err:
|
||||
raise TimeoutError(err.message or str(err)) from err
|
||||
except anthropic.AuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_authentication_error",
|
||||
translation_placeholders={"message": err.message},
|
||||
) from err
|
||||
except anthropic.APIError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"message": err.message},
|
||||
) from err
|
||||
|
||||
def mark_connection_error(self) -> None:
|
||||
"""Mark the connection as having an error and reschedule background check."""
|
||||
self.update_interval = UPDATE_INTERVAL_DISCONNECTED
|
||||
if self.last_update_success:
|
||||
self.last_update_success = False
|
||||
self.async_update_listeners()
|
||||
if self._listeners and not self.hass.is_stopping:
|
||||
self._schedule_refresh()
|
||||
64  homeassistant/components/anthropic/diagnostics.py  Normal file
@@ -0,0 +1,64 @@
|
||||
"""Diagnostics support for Anthropic."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from anthropic import __title__, __version__
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_API_KEY
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from .const import (
|
||||
CONF_PROMPT,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import AnthropicConfigEntry
|
||||
|
||||
|
||||
TO_REDACT = {
|
||||
CONF_API_KEY,
|
||||
CONF_PROMPT,
|
||||
CONF_WEB_SEARCH_CITY,
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_COUNTRY,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: AnthropicConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
return {
|
||||
"client": f"{__title__}=={__version__}",
|
||||
"title": entry.title,
|
||||
"entry_id": entry.entry_id,
|
||||
"entry_version": f"{entry.version}.{entry.minor_version}",
|
||||
"state": entry.state.value,
|
||||
"data": async_redact_data(entry.data, TO_REDACT),
|
||||
"options": async_redact_data(entry.options, TO_REDACT),
|
||||
"subentries": {
|
||||
subentry.subentry_id: {
|
||||
"title": subentry.title,
|
||||
"subentry_type": subentry.subentry_type,
|
||||
"data": async_redact_data(subentry.data, TO_REDACT),
|
||||
}
|
||||
for subentry in entry.subentries.values()
|
||||
},
|
||||
"entities": {
|
||||
entity_entry.entity_id: entity_entry.extended_dict
|
||||
for entity_entry in er.async_entries_for_config_entry(
|
||||
er.async_get(hass), entry.entry_id
|
||||
)
|
||||
},
|
||||
}
|
||||
@@ -19,6 +19,8 @@ from anthropic.types import (
|
||||
CitationsWebSearchResultLocation,
|
||||
CitationWebSearchResultLocationParam,
|
||||
CodeExecutionTool20250825Param,
|
||||
CodeExecutionToolResultBlock,
|
||||
CodeExecutionToolResultBlockParamContentParam,
|
||||
Container,
|
||||
ContentBlockParam,
|
||||
DocumentBlockParam,
|
||||
@@ -61,15 +63,16 @@ from anthropic.types import (
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
WebSearchTool20250305Param,
|
||||
WebSearchTool20260209Param,
|
||||
WebSearchToolResultBlock,
|
||||
WebSearchToolResultBlockParamContentParam,
|
||||
)
|
||||
from anthropic.types.bash_code_execution_tool_result_block_param import (
|
||||
Content as BashCodeExecutionToolResultContentParam,
|
||||
Content as BashCodeExecutionToolResultBlockParamContentParam,
|
||||
)
|
||||
from anthropic.types.message_create_params import MessageCreateParamsStreaming
|
||||
from anthropic.types.text_editor_code_execution_tool_result_block_param import (
|
||||
Content as TextEditorCodeExecutionToolResultContentParam,
|
||||
Content as TextEditorCodeExecutionToolResultBlockParamContentParam,
|
||||
)
|
||||
import voluptuous as vol
|
||||
from voluptuous_openapi import convert
|
||||
@@ -79,12 +82,11 @@ from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.json import json_dumps
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util import slugify
|
||||
from homeassistant.util.json import JsonObjectType
|
||||
|
||||
from . import AnthropicConfigEntry
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
CONF_CODE_EXECUTION,
|
||||
@@ -105,8 +107,10 @@ from .const import (
|
||||
MIN_THINKING_BUDGET,
|
||||
NON_ADAPTIVE_THINKING_MODELS,
|
||||
NON_THINKING_MODELS,
|
||||
PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS,
|
||||
UNSUPPORTED_STRUCTURED_OUTPUT_MODELS,
|
||||
)
|
||||
from .coordinator import AnthropicConfigEntry, AnthropicCoordinator
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
MAX_TOOL_ITERATIONS = 10
|
||||
@@ -224,12 +228,22 @@ def _convert_content(
|
||||
},
|
||||
),
|
||||
}
|
||||
elif content.tool_name == "code_execution":
|
||||
tool_result_block = {
|
||||
"type": "code_execution_tool_result",
|
||||
"tool_use_id": content.tool_call_id,
|
||||
"content": cast(
|
||||
CodeExecutionToolResultBlockParamContentParam,
|
||||
content.tool_result,
|
||||
),
|
||||
}
|
||||
elif content.tool_name == "bash_code_execution":
|
||||
tool_result_block = {
|
||||
"type": "bash_code_execution_tool_result",
|
||||
"tool_use_id": content.tool_call_id,
|
||||
"content": cast(
|
||||
BashCodeExecutionToolResultContentParam, content.tool_result
|
||||
BashCodeExecutionToolResultBlockParamContentParam,
|
||||
content.tool_result,
|
||||
),
|
||||
}
|
||||
elif content.tool_name == "text_editor_code_execution":
|
||||
@@ -237,7 +251,7 @@ def _convert_content(
|
||||
"type": "text_editor_code_execution_tool_result",
|
||||
"tool_use_id": content.tool_call_id,
|
||||
"content": cast(
|
||||
TextEditorCodeExecutionToolResultContentParam,
|
||||
TextEditorCodeExecutionToolResultBlockParamContentParam,
|
||||
content.tool_result,
|
||||
),
|
||||
}
|
||||
@@ -368,6 +382,7 @@ def _convert_content(
|
||||
name=cast(
|
||||
Literal[
|
||||
"web_search",
|
||||
"code_execution",
|
||||
"bash_code_execution",
|
||||
"text_editor_code_execution",
|
||||
],
|
||||
@@ -379,6 +394,7 @@ def _convert_content(
|
||||
and tool_call.tool_name
|
||||
in [
|
||||
"web_search",
|
||||
"code_execution",
|
||||
"bash_code_execution",
|
||||
"text_editor_code_execution",
|
||||
]
|
||||
@@ -401,7 +417,11 @@ def _convert_content(
|
||||
messages[-1]["content"] = messages[-1]["content"][0]["text"]
|
||||
else:
|
||||
# Note: We don't pass SystemContent here as it's passed to the API as the prompt
|
||||
raise HomeAssistantError("Unexpected content type in chat log")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="unexpected_chat_log_content",
|
||||
translation_placeholders={"type": type(content).__name__},
|
||||
)
|
||||
|
||||
return messages, container_id
|
||||
|
||||
@@ -443,7 +463,9 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
Each message could contain multiple blocks of the same type.
|
||||
"""
|
||||
if stream is None or not hasattr(stream, "__aiter__"):
|
||||
raise HomeAssistantError("Expected a stream of messages")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="unexpected_stream_object"
|
||||
)
|
||||
|
||||
current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None
|
||||
current_tool_args: str
|
||||
@@ -464,7 +486,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
type="tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input={},
|
||||
input=response.content_block.input or {},
|
||||
)
|
||||
current_tool_args = ""
|
||||
if response.content_block.name == output_tool:
|
||||
@@ -526,13 +548,14 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
type="server_tool_use",
|
||||
id=response.content_block.id,
|
||||
name=response.content_block.name,
|
||||
input={},
|
||||
input=response.content_block.input or {},
|
||||
)
|
||||
current_tool_args = ""
|
||||
elif isinstance(
|
||||
response.content_block,
|
||||
(
|
||||
WebSearchToolResultBlock,
|
||||
CodeExecutionToolResultBlock,
|
||||
BashCodeExecutionToolResultBlock,
|
||||
TextEditorCodeExecutionToolResultBlock,
|
||||
),
|
||||
@@ -588,13 +611,13 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
current_tool_block = None
|
||||
continue
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
current_tool_block["input"] = tool_args
|
||||
current_tool_block["input"] |= tool_args
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=current_tool_block["id"],
|
||||
tool_name=current_tool_block["name"],
|
||||
tool_args=tool_args,
|
||||
tool_args=current_tool_block["input"],
|
||||
external=current_tool_block["type"] == "server_tool_use",
|
||||
)
|
||||
]
|
||||
@@ -605,7 +628,9 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
content_details.container = response.delta.container
|
||||
if response.delta.stop_reason == "refusal":
|
||||
raise HomeAssistantError("Potential policy violation detected")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="api_refusal"
|
||||
)
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if content_details:
|
||||
content_details.delete_empty()
|
||||
@@ -633,7 +658,7 @@ def _create_token_stats(
|
||||
}
|
||||
|
||||
|
||||
class AnthropicBaseLLMEntity(Entity):
|
||||
class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
|
||||
"""Anthropic base LLM entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
@@ -641,6 +666,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
|
||||
def __init__(self, entry: AnthropicConfigEntry, subentry: ConfigSubentry) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(entry.runtime_data)
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
@@ -664,7 +690,9 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
|
||||
system = chat_log.content[0]
|
||||
if not isinstance(system, conversation.SystemContent):
|
||||
raise HomeAssistantError("First message must be a system message")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="system_message_not_found"
|
||||
)
|
||||
|
||||
# System prompt with caching enabled
|
||||
system_prompt: list[TextBlockParam] = [
|
||||
@@ -725,19 +753,34 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
]
|
||||
|
||||
if options.get(CONF_CODE_EXECUTION):
|
||||
tools.append(
|
||||
CodeExecutionTool20250825Param(
|
||||
name="code_execution",
|
||||
type="code_execution_20250825",
|
||||
),
|
||||
)
|
||||
# The `web_search_20260209` tool automatically enables `code_execution_20260120` tool
|
||||
if model.startswith(
|
||||
tuple(PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS)
|
||||
) or not options.get(CONF_WEB_SEARCH):
|
||||
tools.append(
|
||||
CodeExecutionTool20250825Param(
|
||||
name="code_execution",
|
||||
type="code_execution_20250825",
|
||||
),
|
||||
)
|
||||
|
||||
if options.get(CONF_WEB_SEARCH):
|
||||
web_search = WebSearchTool20250305Param(
|
||||
name="web_search",
|
||||
type="web_search_20250305",
|
||||
max_uses=options.get(CONF_WEB_SEARCH_MAX_USES),
|
||||
)
|
||||
if model.startswith(
|
||||
tuple(PROGRAMMATIC_TOOL_CALLING_UNSUPPORTED_MODELS)
|
||||
) or not options.get(CONF_CODE_EXECUTION):
|
||||
web_search: WebSearchTool20250305Param | WebSearchTool20260209Param = (
|
||||
WebSearchTool20250305Param(
|
||||
name="web_search",
|
||||
type="web_search_20250305",
|
||||
max_uses=options.get(CONF_WEB_SEARCH_MAX_USES),
|
||||
)
|
||||
)
|
||||
else:
|
||||
web_search = WebSearchTool20260209Param(
|
||||
name="web_search",
|
||||
type="web_search_20260209",
|
||||
max_uses=options.get(CONF_WEB_SEARCH_MAX_USES),
|
||||
)
|
||||
if options.get(CONF_WEB_SEARCH_USER_LOCATION):
|
||||
web_search["user_location"] = {
|
||||
"type": "approximate",
|
||||
@@ -754,7 +797,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
last_message = messages[-1]
|
||||
if last_message["role"] != "user":
|
||||
raise HomeAssistantError(
|
||||
"Last message must be a user message to add attachments"
|
||||
translation_domain=DOMAIN, translation_key="user_message_not_found"
|
||||
)
|
||||
if isinstance(last_message["content"], str):
|
||||
last_message["content"] = [
|
||||
@@ -835,7 +878,8 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
if tools:
|
||||
model_args["tools"] = tools
|
||||
|
||||
client = self.entry.runtime_data
|
||||
coordinator = self.entry.runtime_data
|
||||
client = coordinator.client
|
||||
|
||||
# To prevent infinite loops, we limit the number of iterations
|
||||
for _iteration in range(max_iterations):
|
||||
@@ -857,16 +901,36 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
)
|
||||
messages.extend(new_messages)
|
||||
except anthropic.AuthenticationError as err:
|
||||
self.entry.async_start_reauth(self.hass)
|
||||
# Trigger coordinator to confirm the auth failure and trigger the reauth flow.
|
||||
await coordinator.async_request_refresh()
|
||||
raise HomeAssistantError(
|
||||
"Authentication error with Anthropic API, reauthentication required"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_authentication_error",
|
||||
translation_placeholders={"message": err.message},
|
||||
) from err
|
||||
except anthropic.APIConnectionError as err:
|
||||
LOGGER.info("Connection error while talking to Anthropic: %s", err)
|
||||
coordinator.mark_connection_error()
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={"message": err.message},
|
||||
) from err
|
||||
except anthropic.AnthropicError as err:
|
||||
# Non-connection error, mark connection as healthy
|
||||
coordinator.async_set_updated_data(None)
|
||||
raise HomeAssistantError(
|
||||
f"Sorry, I had a problem talking to Anthropic: {err}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="api_error",
|
||||
translation_placeholders={
|
||||
"message": err.message
|
||||
if isinstance(err, anthropic.APIError)
|
||||
else str(err)
|
||||
},
|
||||
) from err
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
coordinator.async_set_updated_data(None)
|
||||
break
|
||||
|
||||
|
||||
@@ -883,15 +947,23 @@ async def async_prepare_files_for_prompt(
|
||||
|
||||
for file_path, mime_type in files:
|
||||
if not file_path.exists():
|
||||
raise HomeAssistantError(f"`{file_path}` does not exist")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="wrong_file_path",
|
||||
translation_placeholders={"file_path": file_path.as_posix()},
|
||||
)
|
||||
|
||||
if mime_type is None:
|
||||
mime_type = guess_file_type(file_path)[0]
|
||||
|
||||
if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
|
||||
raise HomeAssistantError(
|
||||
"Only images and PDF are supported by the Anthropic API,"
|
||||
f"`{file_path}` is not an image file or PDF"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="wrong_file_type",
|
||||
translation_placeholders={
|
||||
"file_path": file_path.as_posix(),
|
||||
"mime_type": mime_type or "unknown",
|
||||
},
|
||||
)
|
||||
if mime_type == "image/jpg":
|
||||
mime_type = "image/jpeg"
|
||||
@@ -35,9 +35,9 @@ rules:
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
log-when-unavailable: done
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: |
|
||||
@@ -46,7 +46,7 @@ rules:
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
@@ -59,17 +59,11 @@ rules:
|
||||
status: exempt
|
||||
comment: |
|
||||
No data updates.
|
||||
docs-examples:
|
||||
status: todo
|
||||
comment: |
|
||||
To give examples of how people use the integration
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices:
|
||||
status: todo
|
||||
comment: |
|
||||
To write something about what models we support.
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
@@ -88,7 +82,7 @@ rules:
|
||||
comment: |
|
||||
No entities disabled by default.
|
||||
entity-translations: todo
|
||||
exception-translations: todo
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues: done
|
||||
|
||||
@@ -58,7 +58,7 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
|
||||
if entry.entry_id in self._model_list_cache:
|
||||
model_list = self._model_list_cache[entry.entry_id]
|
||||
else:
|
||||
client = entry.runtime_data
|
||||
client = entry.runtime_data.client
|
||||
model_list = [
|
||||
model_option
|
||||
for model_option in await get_model_list(client)
|
||||
@@ -161,7 +161,9 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
|
||||
is None
|
||||
or (subentry := entry.subentries.get(self._current_subentry_id)) is None
|
||||
):
|
||||
raise HomeAssistantError("Subentry not found")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="subentry_not_found"
|
||||
)
|
||||
|
||||
updated_data = {
|
||||
**subentry.data,
|
||||
@@ -190,4 +192,6 @@ async def async_create_fix_flow(
|
||||
"""Create flow."""
|
||||
if issue_id == "model_deprecated":
|
||||
return ModelDeprecatedRepairFlow()
|
||||
raise HomeAssistantError("Unknown issue ID")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="unknown_issue_id"
|
||||
)
|
||||
|
||||
@@ -149,6 +149,47 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"api_authentication_error": {
|
||||
"message": "Authentication error with Anthropic API: {message}. Reauthentication required."
|
||||
},
|
||||
"api_error": {
|
||||
"message": "Anthropic API error: {message}."
|
||||
},
|
||||
"api_refusal": {
|
||||
"message": "Potential policy violation detected."
|
||||
},
|
||||
"json_parse_error": {
|
||||
"message": "Error with Claude structured response."
|
||||
},
|
||||
"response_not_found": {
|
||||
"message": "Last content in chat log is not an AssistantContent."
|
||||
},
|
||||
"subentry_not_found": {
|
||||
"message": "Subentry not found."
|
||||
},
|
||||
"system_message_not_found": {
|
||||
"message": "First message must be a system message."
|
||||
},
|
||||
"unexpected_chat_log_content": {
|
||||
"message": "Unexpected content type in chat log: {type}."
|
||||
},
|
||||
"unexpected_stream_object": {
|
||||
"message": "Expected a stream of messages."
|
||||
},
|
||||
"unknown_issue_id": {
|
||||
"message": "Unknown issue ID."
|
||||
},
|
||||
"user_message_not_found": {
|
||||
"message": "Last message must be a user message to add attachments."
|
||||
},
|
||||
"wrong_file_path": {
|
||||
"message": "`{file_path}` does not exist."
|
||||
},
|
||||
"wrong_file_type": {
|
||||
"message": "Only images and PDF are supported by the Anthropic API, `{file_path}` ({mime_type}) is not an image file or PDF."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"model_deprecated": {
|
||||
"fix_flow": {
|
||||
|
||||
@@ -30,9 +30,10 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
CONF_CREDENTIALS,
|
||||
@@ -42,9 +43,12 @@ from .const import (
|
||||
SIGNAL_CONNECTED,
|
||||
SIGNAL_DISCONNECTED,
|
||||
)
|
||||
from .services import async_setup_services
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
DEFAULT_NAME_TV = "Apple TV"
|
||||
DEFAULT_NAME_HP = "HomePod"
|
||||
|
||||
@@ -77,6 +81,12 @@ DEVICE_EXCEPTIONS = (
|
||||
type AppleTvConfigEntry = ConfigEntry[AppleTVManager]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Apple TV component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool:
|
||||
"""Set up a config entry for Apple TV."""
|
||||
manager = AppleTVManager(hass, entry)
|
||||
|
||||
@@ -9,3 +9,5 @@ CONF_START_OFF = "start_off"
|
||||
|
||||
SIGNAL_CONNECTED = "apple_tv_connected"
|
||||
SIGNAL_DISCONNECTED = "apple_tv_disconnected"
|
||||
|
||||
ATTR_TEXT = "text"
|
||||
|
||||
@@ -8,5 +8,16 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"append_keyboard_text": {
|
||||
"service": "mdi:keyboard"
|
||||
},
|
||||
"clear_keyboard_text": {
|
||||
"service": "mdi:keyboard-off"
|
||||
},
|
||||
"set_keyboard_text": {
|
||||
"service": "mdi:keyboard"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
130
homeassistant/components/apple_tv/services.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""Define services for the Apple TV integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyatv.const import KeyboardFocusState
|
||||
from pyatv.exceptions import NotSupportedError, ProtocolError
|
||||
from pyatv.interface import AppleTV as AppleTVInterface
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import ATTR_TEXT, DOMAIN
|
||||
|
||||
SERVICE_SET_KEYBOARD_TEXT = "set_keyboard_text"
|
||||
SERVICE_SET_KEYBOARD_TEXT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string,
|
||||
vol.Required(ATTR_TEXT): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_APPEND_KEYBOARD_TEXT = "append_keyboard_text"
|
||||
SERVICE_APPEND_KEYBOARD_TEXT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string,
|
||||
vol.Required(ATTR_TEXT): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
SERVICE_CLEAR_KEYBOARD_TEXT = "clear_keyboard_text"
|
||||
SERVICE_CLEAR_KEYBOARD_TEXT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _get_atv(call: ServiceCall) -> AppleTVInterface:
|
||||
"""Get the AppleTVInterface for a service call."""
|
||||
entry = service.async_get_config_entry(
|
||||
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
|
||||
)
|
||||
atv: AppleTVInterface | None = entry.runtime_data.atv
|
||||
if atv is None:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_connected",
|
||||
)
|
||||
return atv
|
||||
|
||||
|
||||
def _check_keyboard_focus(atv: AppleTVInterface) -> None:
|
||||
"""Check that keyboard is focused on the device."""
|
||||
try:
|
||||
focus_state = atv.keyboard.text_focus_state
|
||||
except NotSupportedError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="keyboard_not_available",
|
||||
) from err
|
||||
if focus_state != KeyboardFocusState.Focused:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="keyboard_not_focused",
|
||||
)
|
||||
|
||||
|
||||
async def _async_set_keyboard_text(call: ServiceCall) -> None:
|
||||
"""Set text in the keyboard input field on an Apple TV."""
|
||||
atv = _get_atv(call)
|
||||
_check_keyboard_focus(atv)
|
||||
try:
|
||||
await atv.keyboard.text_set(call.data[ATTR_TEXT])
|
||||
except ProtocolError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="keyboard_error",
|
||||
) from err
|
||||
|
||||
|
||||
async def _async_append_keyboard_text(call: ServiceCall) -> None:
|
||||
"""Append text to the keyboard input field on an Apple TV."""
|
||||
atv = _get_atv(call)
|
||||
_check_keyboard_focus(atv)
|
||||
try:
|
||||
await atv.keyboard.text_append(call.data[ATTR_TEXT])
|
||||
except ProtocolError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="keyboard_error",
|
||||
) from err
|
||||
|
||||
|
||||
async def _async_clear_keyboard_text(call: ServiceCall) -> None:
|
||||
"""Clear text in the keyboard input field on an Apple TV."""
|
||||
atv = _get_atv(call)
|
||||
_check_keyboard_focus(atv)
|
||||
try:
|
||||
await atv.keyboard.text_clear()
|
||||
except ProtocolError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="keyboard_error",
|
||||
) from err
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Apple TV integration."""
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_KEYBOARD_TEXT,
|
||||
_async_set_keyboard_text,
|
||||
schema=SERVICE_SET_KEYBOARD_TEXT_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_APPEND_KEYBOARD_TEXT,
|
||||
_async_append_keyboard_text,
|
||||
schema=SERVICE_APPEND_KEYBOARD_TEXT_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_KEYBOARD_TEXT,
|
||||
_async_clear_keyboard_text,
|
||||
schema=SERVICE_CLEAR_KEYBOARD_TEXT_SCHEMA,
|
||||
)
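
With the three keyboard services registered above, text entry can be driven from automations or scripts. A short illustration of invoking one of them from Python; the entry ID is a placeholder, not a real config entry.

from homeassistant.core import HomeAssistant


async def type_on_apple_tv(hass: HomeAssistant, entry_id: str, text: str) -> None:
    """Send text to the focused input field via the new service."""
    await hass.services.async_call(
        "apple_tv",
        "set_keyboard_text",
        {"config_entry_id": entry_id, "text": text},
        blocking=True,
    )
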
31
homeassistant/components/apple_tv/services.yaml
Normal file
@@ -0,0 +1,31 @@
|
||||
set_keyboard_text:
|
||||
fields:
|
||||
config_entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: apple_tv
|
||||
text:
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
|
||||
append_keyboard_text:
|
||||
fields:
|
||||
config_entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: apple_tv
|
||||
text:
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
|
||||
clear_keyboard_text:
|
||||
fields:
|
||||
config_entry_id:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: apple_tv
|
||||
@@ -69,6 +69,20 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"keyboard_error": {
|
||||
"message": "An error occurred while sending text to the Apple TV"
|
||||
},
|
||||
"keyboard_not_available": {
|
||||
"message": "Keyboard input is not supported by this device"
|
||||
},
|
||||
"keyboard_not_focused": {
|
||||
"message": "No text input field is currently focused on the Apple TV"
|
||||
},
|
||||
"not_connected": {
|
||||
"message": "Apple TV is not connected"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
@@ -78,5 +92,45 @@
|
||||
"description": "Configure general device settings"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"append_keyboard_text": {
|
||||
"description": "Appends text to the currently focused text input field on an Apple TV.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "[%key:component::apple_tv::services::set_keyboard_text::fields::config_entry_id::description%]",
|
||||
"name": "[%key:component::apple_tv::services::set_keyboard_text::fields::config_entry_id::name%]"
|
||||
},
|
||||
"text": {
|
||||
"description": "The text to append.",
|
||||
"name": "[%key:component::apple_tv::services::set_keyboard_text::fields::text::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Append keyboard text"
|
||||
},
|
||||
"clear_keyboard_text": {
|
||||
"description": "Clears the text in the currently focused text input field on an Apple TV.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "[%key:component::apple_tv::services::set_keyboard_text::fields::config_entry_id::description%]",
|
||||
"name": "[%key:component::apple_tv::services::set_keyboard_text::fields::config_entry_id::name%]"
|
||||
}
|
||||
},
|
||||
"name": "Clear keyboard text"
|
||||
},
|
||||
"set_keyboard_text": {
|
||||
"description": "Sets the text in the currently focused text input field on an Apple TV.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "The Apple TV to send text to.",
|
||||
"name": "Apple TV"
|
||||
},
|
||||
"text": {
|
||||
"description": "The text to set.",
|
||||
"name": "Text"
|
||||
}
|
||||
},
|
||||
"name": "Set keyboard text"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
import asyncio
|
||||
from asyncio import timeout
|
||||
from contextlib import AsyncExitStack
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from arcam.fmj import ConnectionFailed
|
||||
from arcam.fmj.client import Client
|
||||
@@ -54,36 +54,31 @@ async def _run_client(
|
||||
client = runtime_data.client
|
||||
coordinators = runtime_data.coordinators
|
||||
|
||||
def _listen(_: Any) -> None:
|
||||
for coordinator in coordinators.values():
|
||||
coordinator.async_notify_data_updated()
|
||||
|
||||
while True:
|
||||
try:
|
||||
async with timeout(interval):
|
||||
await client.start()
|
||||
async with AsyncExitStack() as stack:
|
||||
async with timeout(interval):
|
||||
await client.start()
|
||||
stack.push_async_callback(client.stop)
|
||||
|
||||
_LOGGER.debug("Client connected %s", client.host)
|
||||
_LOGGER.debug("Client connected %s", client.host)
|
||||
|
||||
try:
|
||||
for coordinator in coordinators.values():
|
||||
await coordinator.state.start()
|
||||
|
||||
with client.listen(_listen):
|
||||
try:
|
||||
for coordinator in coordinators.values():
|
||||
coordinator.async_notify_connected()
|
||||
await client.process()
|
||||
finally:
|
||||
await client.stop()
|
||||
await stack.enter_async_context(
|
||||
coordinator.async_monitor_client()
|
||||
)
|
||||
|
||||
_LOGGER.debug("Client disconnected %s", client.host)
|
||||
for coordinator in coordinators.values():
|
||||
coordinator.async_notify_disconnected()
|
||||
await client.process()
|
||||
finally:
|
||||
_LOGGER.debug("Client disconnected %s", client.host)
|
||||
|
||||
except ConnectionFailed:
|
||||
await asyncio.sleep(interval)
|
||||
pass
|
||||
except TimeoutError:
|
||||
continue
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception, aborting arcam client")
|
||||
return
|
||||
|
||||
await asyncio.sleep(interval)
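
The rewritten loop above leans on AsyncExitStack so that client.stop() and the per-zone monitors are unwound in one place however the connection ends. A stripped-down sketch of that pattern; the connect/stop/process callables are placeholders standing in for the arcam client, not its API.

import asyncio
from contextlib import AsyncExitStack


async def run_connection(connect, stop, process, interval: float = 5.0) -> None:
    """Keep a connection alive, always unwinding cleanup callbacks."""
    while True:
        try:
            async with AsyncExitStack() as stack:
                async with asyncio.timeout(interval):
                    await connect()
                # Callbacks registered on the stack run in reverse order
                # when the block exits, even if process() raises.
                stack.push_async_callback(stop)
                await process()
        except TimeoutError:
            continue
        except ConnectionError:
            pass
        await asyncio.sleep(interval)
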
|
||||
|
||||
@@ -2,11 +2,13 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncGenerator
|
||||
from contextlib import asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from arcam.fmj import ConnectionFailed
|
||||
from arcam.fmj.client import Client
|
||||
from arcam.fmj.client import AmxDuetResponse, Client, ResponsePacket
|
||||
from arcam.fmj.state import State
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -51,7 +53,7 @@ class ArcamFmjCoordinator(DataUpdateCoordinator[None]):
|
||||
)
|
||||
self.client = client
|
||||
self.state = State(client, zone)
|
||||
self.last_update_success = False
|
||||
self.update_in_progress = False
|
||||
|
||||
name = config_entry.title
|
||||
unique_id = config_entry.unique_id or config_entry.entry_id
|
||||
@@ -74,24 +76,34 @@ class ArcamFmjCoordinator(DataUpdateCoordinator[None]):
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Fetch data for manual refresh."""
|
||||
try:
|
||||
self.update_in_progress = True
|
||||
await self.state.update()
|
||||
except ConnectionFailed as err:
|
||||
raise UpdateFailed(
|
||||
f"Connection failed during update for zone {self.state.zn}"
|
||||
) from err
|
||||
finally:
|
||||
self.update_in_progress = False
|
||||
|
||||
@callback
|
||||
def async_notify_data_updated(self) -> None:
|
||||
"""Notify that new data has been received from the device."""
|
||||
self.async_set_updated_data(None)
|
||||
def _async_notify_packet(self, packet: ResponsePacket | AmxDuetResponse) -> None:
|
||||
"""Packet callback to detect changes to state."""
|
||||
if (
|
||||
not isinstance(packet, ResponsePacket)
|
||||
or packet.zn != self.state.zn
|
||||
or self.update_in_progress
|
||||
):
|
||||
return
|
||||
|
||||
@callback
|
||||
def async_notify_connected(self) -> None:
|
||||
"""Handle client connected."""
|
||||
self.hass.async_create_task(self.async_refresh())
|
||||
|
||||
@callback
|
||||
def async_notify_disconnected(self) -> None:
|
||||
"""Handle client disconnected."""
|
||||
self.last_update_success = False
|
||||
self.async_update_listeners()
|
||||
|
||||
@asynccontextmanager
|
||||
async def async_monitor_client(self) -> AsyncGenerator[None]:
|
||||
"""Monitor a client and state for changes while connected."""
|
||||
async with self.state:
|
||||
self.hass.async_create_task(self.async_refresh())
|
||||
try:
|
||||
with self.client.listen(self._async_notify_packet):
|
||||
yield
|
||||
finally:
|
||||
self.hass.async_create_task(self.async_refresh())
|
||||
|
||||
@@ -26,3 +26,8 @@ class ArcamFmjEntity(CoordinatorEntity[ArcamFmjCoordinator]):
|
||||
if description is not None:
|
||||
self._attr_unique_id = f"{self._attr_unique_id}-{description.key}"
|
||||
self.entity_description = description
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and self.coordinator.client.connected
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["arcam"],
|
||||
"requirements": ["arcam-fmj==1.8.3"],
|
||||
"requirements": ["arcam-fmj==1.8.2"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
|
||||
|
||||
@@ -91,7 +91,6 @@ SENSORS: tuple[ArcamFmjSensorEntityDescription, ...] = (
|
||||
value_fn=lambda state: (
|
||||
vp.colorspace.name.lower()
|
||||
if (vp := state.get_incoming_video_parameters()) is not None
|
||||
and vp.colorspace is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
|
||||
@@ -1 +1 @@
|
||||
"""The Arris TG2492LG component."""
|
||||
"""The Arris TG2492LG integration."""
|
||||
|
||||
@@ -1 +1 @@
|
||||
"""The aruba component."""
|
||||
"""The Aruba integration."""
|
||||
|
||||
@@ -75,7 +75,7 @@
|
||||
},
|
||||
"services": {
|
||||
"announce": {
|
||||
"description": "Lets an Assist satellite announce a message.",
|
||||
"description": "Lets a satellite announce a message.",
|
||||
"fields": {
|
||||
"media_id": {
|
||||
"description": "The media ID to announce instead of using text-to-speech.",
|
||||
@@ -94,10 +94,10 @@
|
||||
"name": "Preannounce media ID"
|
||||
}
|
||||
},
|
||||
"name": "Announce on satellite"
|
||||
"name": "Announce"
|
||||
},
|
||||
"ask_question": {
|
||||
"description": "Lets an Assist satellite ask a question and get the user's response.",
|
||||
"description": "Asks a question and gets the user's response.",
|
||||
"fields": {
|
||||
"answers": {
|
||||
"description": "Possible answers to the question.",
|
||||
@@ -124,10 +124,10 @@
|
||||
"name": "Question media ID"
|
||||
}
|
||||
},
|
||||
"name": "Ask question on satellite"
|
||||
"name": "Ask question"
|
||||
},
|
||||
"start_conversation": {
|
||||
"description": "Starts a conversation from an Assist satellite.",
|
||||
"description": "Starts a conversation from a satellite.",
|
||||
"fields": {
|
||||
"extra_system_prompt": {
|
||||
"description": "Provide background information to the AI about the request.",
|
||||
@@ -150,13 +150,13 @@
|
||||
"name": "Message"
|
||||
}
|
||||
},
|
||||
"name": "Start conversation on satellite"
|
||||
"name": "Start conversation"
|
||||
}
|
||||
},
|
||||
"title": "Assist satellite",
|
||||
"triggers": {
|
||||
"idle": {
|
||||
"description": "Triggers after one or more Assist satellites become idle after having processed a command.",
|
||||
"description": "Triggers after one or more voice assistant satellites become idle after having processed a command.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
@@ -165,7 +165,7 @@
|
||||
"name": "Satellite became idle"
|
||||
},
|
||||
"listening": {
|
||||
"description": "Triggers after one or more Assist satellites start listening for a command from someone.",
|
||||
"description": "Triggers after one or more voice assistant satellites start listening for a command from someone.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
@@ -174,7 +174,7 @@
|
||||
"name": "Satellite started listening"
|
||||
},
|
||||
"processing": {
|
||||
"description": "Triggers after one or more Assist satellites start processing a command after having heard it.",
|
||||
"description": "Triggers after one or more voice assistant satellites start processing a command after having heard it.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
@@ -183,7 +183,7 @@
|
||||
"name": "Satellite started processing"
|
||||
},
|
||||
"responding": {
|
||||
"description": "Triggers after one or more Assist satellites start responding to a command after having processed it, or start announcing something.",
|
||||
"description": "Triggers after one or more voice assistant satellites start responding to a command after having processed it, or start announcing something.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
|
||||
@@ -142,6 +142,13 @@ class WellKnownOAuthInfoView(HomeAssistantView):
|
||||
"authorization_endpoint": f"{url_prefix}/auth/authorize",
|
||||
"token_endpoint": f"{url_prefix}/auth/token",
|
||||
"revocation_endpoint": f"{url_prefix}/auth/revoke",
|
||||
# Home Assistant already accepts URL-based client_ids via
|
||||
# IndieAuth without prior registration, which is compatible with
|
||||
# draft-ietf-oauth-client-id-metadata-document. This flag
|
||||
# advertises that support to encourage clients to use it. The
|
||||
# metadata document is not actually fetched as IndieAuth doesn't
|
||||
# require it.
|
||||
"client_id_metadata_document_supported": True,
|
||||
"response_types_supported": ["code"],
|
||||
"service_documentation": (
|
||||
"https://developers.home-assistant.io/docs/auth_api"
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["axis"],
|
||||
"requirements": ["axis==68"],
|
||||
"requirements": ["axis==67"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "AXIS"
|
||||
|
||||
@@ -54,7 +54,7 @@
|
||||
"message": "Storage account {account_name} not found"
|
||||
},
|
||||
"cannot_connect": {
|
||||
"message": "Can not connect to storage account {account_name}"
|
||||
"message": "Cannot connect to storage account {account_name}"
|
||||
},
|
||||
"container_not_found": {
|
||||
"message": "Storage container {container_name} not found"
|
||||
|
||||
@@ -74,12 +74,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) ->
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_bucket_name",
|
||||
) from err
|
||||
except exception.BadRequest as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="bad_request",
|
||||
translation_placeholders={"error_message": str(err)},
|
||||
) from err
|
||||
except (
|
||||
exception.B2ConnectionError,
|
||||
exception.B2RequestTimeout,
|
||||
|
||||
@@ -101,7 +101,8 @@ def handle_b2_errors[T](
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
except B2Error as err:
|
||||
raise BackupAgentError(f"Failed during {func.__name__}: {err}") from err
|
||||
error_msg = f"Failed during {func.__name__}"
|
||||
raise BackupAgentError(error_msg) from err
|
||||
|
||||
return wrapper
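
handle_b2_errors above wraps every agent method so a raw B2Error surfaces as a BackupAgentError with a generic message, keeping library details out of user-facing errors while chaining the original exception. A generic sketch of that decorator shape for async functions, with placeholder exception types instead of b2sdk's.

from collections.abc import Awaitable, Callable
from functools import wraps


class UpstreamError(Exception):
    """Stands in for the library error (B2Error above)."""


class AgentError(Exception):
    """Stands in for the integration error (BackupAgentError above)."""


def translate_errors[**P, T](
    func: Callable[P, Awaitable[T]],
) -> Callable[P, Awaitable[T]]:
    """Convert upstream errors into the integration's own exception."""

    @wraps(func)
    async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
        try:
            return await func(*args, **kwargs)
        except UpstreamError as err:
            # Keep the message generic; the original error stays chained.
            raise AgentError(f"Failed during {func.__name__}") from err

    return wrapper
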
|
||||
|
||||
@@ -169,7 +170,8 @@ class BackblazeBackupAgent(BackupAgent):
|
||||
async def _cleanup_failed_upload(self, filename: str) -> None:
|
||||
"""Clean up a partially uploaded file after upload failure."""
|
||||
_LOGGER.warning(
|
||||
"Attempting to delete partially uploaded backup file %s",
|
||||
"Attempting to delete partially uploaded main backup file %s "
|
||||
"due to metadata upload failure",
|
||||
filename,
|
||||
)
|
||||
try:
|
||||
@@ -178,10 +180,11 @@ class BackblazeBackupAgent(BackupAgent):
|
||||
)
|
||||
await self._hass.async_add_executor_job(uploaded_main_file_info.delete)
|
||||
except B2Error:
|
||||
_LOGGER.warning(
|
||||
"Failed to clean up partially uploaded backup file %s;"
|
||||
" manual deletion from Backblaze B2 may be required",
|
||||
_LOGGER.debug(
|
||||
"Failed to clean up partially uploaded main backup file %s. "
|
||||
"Manual intervention may be required to delete it from Backblaze B2",
|
||||
filename,
|
||||
exc_info=True,
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
@@ -253,10 +256,9 @@ class BackblazeBackupAgent(BackupAgent):
|
||||
prefixed_metadata_filename,
|
||||
)
|
||||
|
||||
tar_uploaded = False
|
||||
upload_successful = False
|
||||
try:
|
||||
await self._upload_backup_file(prefixed_tar_filename, open_stream, {})
|
||||
tar_uploaded = True
|
||||
_LOGGER.debug(
|
||||
"Main backup file upload finished for %s", prefixed_tar_filename
|
||||
)
|
||||
@@ -268,14 +270,15 @@ class BackblazeBackupAgent(BackupAgent):
|
||||
_LOGGER.debug(
|
||||
"Metadata file upload finished for %s", prefixed_metadata_filename
|
||||
)
|
||||
_LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
|
||||
self._invalidate_caches(
|
||||
backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
|
||||
)
|
||||
except B2Error:
|
||||
if tar_uploaded:
|
||||
upload_successful = True
|
||||
finally:
|
||||
if upload_successful:
|
||||
_LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
|
||||
self._invalidate_caches(
|
||||
backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
|
||||
)
|
||||
else:
|
||||
await self._cleanup_failed_upload(prefixed_tar_filename)
|
||||
raise
|
||||
|
||||
def _upload_metadata_file_sync(
|
||||
self, metadata_content: bytes, filename: str
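
The upload path above now tracks a single upload_successful flag and decides in a finally block whether to invalidate caches or delete the partially uploaded tar, instead of checking per-step flags in an except clause. A compact sketch of that shape; the upload and cleanup callables are placeholders.

async def upload_with_cleanup(upload_tar, upload_metadata, cleanup, on_success) -> None:
    """Run both uploads, cleaning up the tar file if anything fails."""
    upload_successful = False
    try:
        await upload_tar()
        await upload_metadata()
        upload_successful = True
    finally:
        if upload_successful:
            on_success()
        else:
            # Remove the partially uploaded tar; the original exception
            # keeps propagating after the finally block.
            await cleanup()
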
|
||||
|
||||
@@ -174,14 +174,6 @@ class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
|
||||
)
|
||||
errors[CONF_BUCKET] = "invalid_bucket_name"
|
||||
except exception.BadRequest as err:
|
||||
_LOGGER.error(
|
||||
"Backblaze B2 API rejected the request for Key ID '%s': %s",
|
||||
user_input[CONF_KEY_ID],
|
||||
err,
|
||||
)
|
||||
errors["base"] = "bad_request"
|
||||
placeholders["error_message"] = str(err)
|
||||
except (
|
||||
exception.B2ConnectionError,
|
||||
exception.B2RequestTimeout,
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["b2sdk"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["b2sdk==2.10.4"]
|
||||
"requirements": ["b2sdk==2.10.1"]
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"bad_request": "The Backblaze B2 API rejected the request: {error_message}",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_bucket_name": "[%key:component::backblaze_b2::exceptions::invalid_bucket_name::message%]",
|
||||
"invalid_capability": "[%key:component::backblaze_b2::exceptions::invalid_capability::message%]",
|
||||
@@ -61,9 +60,6 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"bad_request": {
|
||||
"message": "The Backblaze B2 API rejected the request: {error_message}"
|
||||
},
|
||||
"cannot_connect": {
|
||||
"message": "Cannot connect to endpoint"
|
||||
},
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "calculated",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["cronsim==2.7", "securetar==2026.4.0"],
|
||||
"requirements": ["cronsim==2.7", "securetar==2026.2.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -22,7 +22,6 @@ from securetar import (
|
||||
SecureTarFile,
|
||||
SecureTarReadError,
|
||||
SecureTarRootKeyContext,
|
||||
get_archive_max_ciphertext_size,
|
||||
)
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -432,9 +431,7 @@ class _CipherBackupStreamer:
|
||||
|
||||
def size(self) -> int:
|
||||
"""Return the maximum size of the decrypted or encrypted backup."""
|
||||
return get_archive_max_ciphertext_size( # type: ignore[no-any-return]
|
||||
self._backup.size, SECURETAR_CREATE_VERSION, self._num_tar_files()
|
||||
)
|
||||
return self._backup.size + self._num_tar_files() * tarfile.RECORDSIZE
|
||||
|
||||
def _num_tar_files(self) -> int:
|
||||
"""Return the number of inner tar files."""
|
||||
|
||||
@@ -1 +1 @@
|
||||
"""The bbox component."""
|
||||
"""The Bbox integration."""
|
||||
|
||||
@@ -1 +1 @@
|
||||
"""The bitcoin component."""
|
||||
"""The Bitcoin integration."""
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"domain": "blebox",
|
||||
"name": "BleBox devices",
|
||||
"codeowners": ["@bbx-a", "@swistakm"],
|
||||
"codeowners": ["@bbx-a", "@swistakm", "@bkobus-bbx"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/blebox",
|
||||
"integration_type": "device",
|
||||
|
||||
@@ -1 +1 @@
|
||||
"""The blinksticklight component."""
|
||||
"""The BlinkStick integration."""
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Support for Blinkstick lights."""
|
||||
"""Support for BlinkStick lights."""
|
||||
|
||||
# mypy: ignore-errors
|
||||
from __future__ import annotations
|
||||
@@ -40,7 +40,7 @@ def setup_platform(
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up Blinkstick device specified by serial number."""
|
||||
"""Set up BlinkStick device specified by serial number."""
|
||||
|
||||
name = config[CONF_NAME]
|
||||
serial = config[CONF_SERIAL]
|
||||
|
||||
@@ -52,7 +52,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Rotate the access token."""
|
||||
access_tokens.append(hex(_RND.getrandbits(256))[2:])
|
||||
|
||||
async_track_time_interval(hass, _rotate_token, TOKEN_CHANGE_INTERVAL)
|
||||
async_track_time_interval(
|
||||
hass, _rotate_token, TOKEN_CHANGE_INTERVAL, cancel_on_shutdown=True
|
||||
)
|
||||
|
||||
hass.http.register_view(BrandsIntegrationView(hass))
|
||||
hass.http.register_view(BrandsHardwareView(hass))
|
||||
|
||||
@@ -10,7 +10,6 @@ from bsblan import (
|
||||
BSBLAN,
|
||||
BSBLANAuthError,
|
||||
BSBLANConnectionError,
|
||||
BSBLANError,
|
||||
HotWaterConfig,
|
||||
HotWaterSchedule,
|
||||
HotWaterState,
|
||||
@@ -51,7 +50,7 @@ class BSBLanFastData:
|
||||
|
||||
state: State
|
||||
sensor: Sensor
|
||||
dhw: HotWaterState | None = None
|
||||
dhw: HotWaterState
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -112,6 +111,7 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
|
||||
# This reduces response time significantly (~0.2s per parameter)
|
||||
state = await self.client.state(include=STATE_INCLUDE)
|
||||
sensor = await self.client.sensor(include=SENSOR_INCLUDE)
|
||||
dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
|
||||
|
||||
except BSBLANAuthError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
@@ -126,19 +126,6 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
|
||||
translation_placeholders={"host": host},
|
||||
) from err
|
||||
|
||||
# Fetch DHW state separately - device may not support hot water
|
||||
dhw: HotWaterState | None = None
|
||||
try:
|
||||
dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
|
||||
except BSBLANError:
|
||||
# Preserve last known DHW state if available (entity may depend on it)
|
||||
if self.data:
|
||||
dhw = self.data.dhw
|
||||
LOGGER.debug(
|
||||
"DHW (Domestic Hot Water) state not available on device at %s",
|
||||
self.config_entry.data[CONF_HOST],
|
||||
)
|
||||
|
||||
return BSBLanFastData(
|
||||
state=state,
|
||||
sensor=sensor,
|
||||
@@ -172,6 +159,13 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
|
||||
dhw_config = await self.client.hot_water_config(include=DHW_CONFIG_INCLUDE)
|
||||
dhw_schedule = await self.client.hot_water_schedule()
|
||||
|
||||
except AttributeError:
|
||||
# Device does not support DHW functionality
|
||||
LOGGER.debug(
|
||||
"DHW (Domestic Hot Water) not available on device at %s",
|
||||
self.config_entry.data[CONF_HOST],
|
||||
)
|
||||
return BSBLanSlowData()
|
||||
except (BSBLANConnectionError, BSBLANAuthError) as err:
|
||||
# If config update fails, keep existing data
|
||||
LOGGER.debug(
|
||||
@@ -183,13 +177,6 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
|
||||
return self.data
|
||||
# First fetch failed, return empty data
|
||||
return BSBLanSlowData()
|
||||
except (BSBLANError, AttributeError):
|
||||
# Device does not support DHW functionality
|
||||
LOGGER.debug(
|
||||
"DHW (Domestic Hot Water) not available on device at %s",
|
||||
self.config_entry.data[CONF_HOST],
|
||||
)
|
||||
return BSBLanSlowData()
|
||||
|
||||
return BSBLanSlowData(
|
||||
dhw_config=dhw_config,
|
||||
|
||||
@@ -22,9 +22,7 @@ async def async_get_config_entry_diagnostics(
|
||||
"fast_coordinator_data": {
|
||||
"state": data.fast_coordinator.data.state.model_dump(),
|
||||
"sensor": data.fast_coordinator.data.sensor.model_dump(),
|
||||
"dhw": data.fast_coordinator.data.dhw.model_dump()
|
||||
if data.fast_coordinator.data.dhw
|
||||
else None,
|
||||
"dhw": data.fast_coordinator.data.dhw.model_dump(),
|
||||
},
|
||||
"static": data.static.model_dump() if data.static is not None else None,
|
||||
}
|
||||
|
||||
@@ -2,9 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_NETWORK_MAC,
|
||||
DeviceInfo,
|
||||
@@ -13,7 +10,7 @@ from homeassistant.helpers.device_registry import (
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import BSBLanData
|
||||
from .const import DEFAULT_PORT, DOMAIN
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BSBLanCoordinator, BSBLanFastCoordinator, BSBLanSlowCoordinator
|
||||
|
||||
|
||||
@@ -25,8 +22,7 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
|
||||
def __init__(self, coordinator: _T, data: BSBLanData) -> None:
|
||||
"""Initialize BSBLan entity with device info."""
|
||||
super().__init__(coordinator)
|
||||
host = coordinator.config_entry.data[CONF_HOST]
|
||||
port = coordinator.config_entry.data.get(CONF_PORT, DEFAULT_PORT)
|
||||
host = coordinator.config_entry.data["host"]
|
||||
mac = data.device.MAC
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, mac)},
|
||||
@@ -48,7 +44,7 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
|
||||
else None
|
||||
),
|
||||
sw_version=data.device.version,
|
||||
configuration_url=str(URL.build(scheme="http", host=host, port=port)),
|
||||
configuration_url=f"http://{host}",
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["python-bsblan==5.1.4"],
|
||||
"requirements": ["python-bsblan==5.1.3"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "bsb-lan*",
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from bsblan import BSBLANError, HotWaterState, SetHotWaterParam
|
||||
from bsblan import BSBLANError, SetHotWaterParam
|
||||
|
||||
from homeassistant.components.water_heater import (
|
||||
STATE_ECO,
|
||||
@@ -46,10 +46,8 @@ async def async_setup_entry(
|
||||
data = entry.runtime_data
|
||||
|
||||
# Only create water heater entity if DHW (Domestic Hot Water) is available
|
||||
# Check if we have any DHW-related data indicating water heater support
|
||||
dhw_data = data.fast_coordinator.data.dhw
|
||||
if dhw_data is None:
|
||||
# Device does not support DHW, skip water heater setup
|
||||
return
|
||||
if (
|
||||
dhw_data.operating_mode is None
|
||||
and dhw_data.nominal_setpoint is None
|
||||
@@ -109,21 +107,11 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
else:
|
||||
self._attr_max_temp = 65.0 # Default maximum
|
||||
|
||||
@property
|
||||
def _dhw(self) -> HotWaterState:
|
||||
"""Return DHW state data.
|
||||
|
||||
This entity is only created when DHW data is available.
|
||||
"""
|
||||
dhw = self.coordinator.data.dhw
|
||||
assert dhw is not None
|
||||
return dhw
|
||||
|
||||
@property
|
||||
def current_operation(self) -> str | None:
|
||||
"""Return current operation."""
|
||||
if (
|
||||
operating_mode := self._dhw.operating_mode
|
||||
operating_mode := self.coordinator.data.dhw.operating_mode
|
||||
) is None or operating_mode.value is None:
|
||||
return None
|
||||
return BSBLAN_TO_HA_OPERATION_MODE.get(operating_mode.value)
|
||||
@@ -131,14 +119,16 @@ class BSBLANWaterHeater(BSBLanDualCoordinatorEntity, WaterHeaterEntity):
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
if (current_temp := self._dhw.dhw_actual_value_top_temperature) is None:
|
||||
if (
|
||||
current_temp := self.coordinator.data.dhw.dhw_actual_value_top_temperature
|
||||
) is None:
|
||||
return None
|
||||
return current_temp.value
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the temperature we try to reach."""
|
||||
if (target_temp := self._dhw.nominal_setpoint) is None:
|
||||
if (target_temp := self.coordinator.data.dhw.nominal_setpoint) is None:
|
||||
return None
|
||||
return target_temp.value
|
||||
|
||||
|
||||
@@ -578,13 +578,13 @@ class CalendarEntity(Entity):
|
||||
return STATE_OFF
|
||||
|
||||
@callback
|
||||
def async_write_ha_state(self) -> None:
|
||||
def _async_write_ha_state(self) -> None:
|
||||
"""Write the state to the state machine.
|
||||
|
||||
This sets up listeners to handle state transitions for start or end of
|
||||
the current or upcoming event.
|
||||
"""
|
||||
super().async_write_ha_state()
|
||||
super()._async_write_ha_state()
|
||||
if self._alarm_unsubs is None:
|
||||
self._alarm_unsubs = []
|
||||
_LOGGER.debug(
|
||||
|
||||
@@ -760,12 +760,12 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
return CameraCapabilities(frontend_stream_types)
|
||||
|
||||
@callback
|
||||
def async_write_ha_state(self) -> None:
|
||||
def _async_write_ha_state(self) -> None:
|
||||
"""Write the state to the state machine.
|
||||
|
||||
Schedules async_refresh_providers if support of streams have changed.
|
||||
"""
|
||||
super().async_write_ha_state()
|
||||
super()._async_write_ha_state()
|
||||
if self.__supports_stream != (
|
||||
supports_stream := self.supported_features & CameraEntityFeature.STREAM
|
||||
):
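
Both Calendar and Camera now override the internal _async_write_ha_state hook instead of the public async_write_ha_state, so their side effects run on every state write, including writes triggered by the entity framework itself. A minimal sketch of that override pattern on a plain Entity subclass; the logging side effect is illustrative.

import logging

from homeassistant.core import callback
from homeassistant.helpers.entity import Entity

_LOGGER = logging.getLogger(__name__)


class StateWriteAwareEntity(Entity):
    """Entity that reacts every time its state is written."""

    @callback
    def _async_write_ha_state(self) -> None:
        # Write the state first, then run the side effect.
        super()._async_write_ha_state()
        _LOGGER.debug("State written for %s", self.entity_id)
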
|
||||
|
||||
@@ -11,7 +11,13 @@ from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .coordinator import CasperGlowConfigEntry, CasperGlowCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.LIGHT]
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.BUTTON,
|
||||
Platform.LIGHT,
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: CasperGlowConfigEntry) -> bool:
|
||||
|
||||
@@ -4,7 +4,11 @@ from __future__ import annotations
|
||||
|
||||
from pycasperglow import GlowState
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorEntity
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
@@ -21,7 +25,12 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the binary sensor platform for Casper Glow."""
|
||||
async_add_entities([CasperGlowPausedBinarySensor(entry.runtime_data)])
|
||||
async_add_entities(
|
||||
[
|
||||
CasperGlowPausedBinarySensor(entry.runtime_data),
|
||||
CasperGlowChargingBinarySensor(entry.runtime_data),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class CasperGlowPausedBinarySensor(CasperGlowEntity, BinarySensorEntity):
|
||||
@@ -46,6 +55,34 @@ class CasperGlowPausedBinarySensor(CasperGlowEntity, BinarySensorEntity):
|
||||
@callback
|
||||
def _async_handle_state_update(self, state: GlowState) -> None:
|
||||
"""Handle a state update from the device."""
|
||||
if state.is_paused is not None:
|
||||
if state.is_paused is not None and state.is_paused != self._attr_is_on:
|
||||
self._attr_is_on = state.is_paused
|
||||
self.async_write_ha_state()
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class CasperGlowChargingBinarySensor(CasperGlowEntity, BinarySensorEntity):
|
||||
"""Binary sensor indicating whether the Casper Glow is charging."""
|
||||
|
||||
_attr_device_class = BinarySensorDeviceClass.BATTERY_CHARGING
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
|
||||
def __init__(self, coordinator: CasperGlowCoordinator) -> None:
|
||||
"""Initialize the charging binary sensor."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = f"{format_mac(coordinator.device.address)}_charging"
|
||||
if coordinator.device.state.is_charging is not None:
|
||||
self._attr_is_on = coordinator.device.state.is_charging
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register state update callback when entity is added."""
|
||||
await super().async_added_to_hass()
|
||||
self.async_on_remove(
|
||||
self._device.register_callback(self._async_handle_state_update)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_handle_state_update(self, state: GlowState) -> None:
|
||||
"""Handle a state update from the device."""
|
||||
if state.is_charging is not None and state.is_charging != self._attr_is_on:
|
||||
self._attr_is_on = state.is_charging
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -12,5 +12,7 @@ SORTED_BRIGHTNESS_LEVELS = sorted(BRIGHTNESS_LEVELS)
|
||||
|
||||
DEFAULT_DIMMING_TIME_MINUTES: int = DIMMING_TIME_MINUTES[0]
|
||||
|
||||
DIMMING_TIME_OPTIONS: tuple[str, ...] = tuple(str(m) for m in DIMMING_TIME_MINUTES)
|
||||
|
||||
# Interval between periodic state polls to catch externally-triggered changes.
|
||||
STATE_POLL_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
@@ -19,7 +19,7 @@ from homeassistant.components.bluetooth.active_update_coordinator import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import STATE_POLL_INTERVAL
|
||||
from .const import SORTED_BRIGHTNESS_LEVELS, STATE_POLL_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -51,6 +51,15 @@ class CasperGlowCoordinator(ActiveBluetoothDataUpdateCoordinator[None]):
|
||||
)
|
||||
self.title = title
|
||||
|
||||
# The device API couples brightness and dimming time into a
|
||||
# single command (set_brightness_and_dimming_time), so both
|
||||
# values must be tracked here for cross-entity use.
|
||||
self.last_brightness_pct: int = (
|
||||
device.state.brightness_level
|
||||
if device.state.brightness_level is not None
|
||||
else SORTED_BRIGHTNESS_LEVELS[0]
|
||||
)
|
||||
|
||||
@callback
|
||||
def _needs_poll(
|
||||
self,
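
As the comment in this hunk notes, the firmware exposes brightness and dimming time only as one combined command, so the coordinator caches the last value of each for whichever entity writes next. A hedged sketch of a write that reuses the cached counterpart; set_brightness_and_dimming_time appears in the select platform below, while the device argument and the fallback value here are illustrative.

async def write_brightness(coordinator, device, brightness_pct: int) -> None:
    """Send a new brightness, re-sending the cached dimming time with it."""
    await device.set_brightness_and_dimming_time(
        brightness_pct,
        coordinator.last_dimming_time_minutes or 45,  # fallback is illustrative
    )
    coordinator.last_brightness_pct = brightness_pct
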
|
||||
|
||||
31
homeassistant/components/casper_glow/diagnostics.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""Diagnostics support for the Casper Glow integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import CasperGlowConfigEntry
|
||||
|
||||
SERVICE_INFO_TO_REDACT = frozenset({"address", "name", "source", "device"})
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: CasperGlowConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
service_info = bluetooth.async_last_service_info(
|
||||
hass, coordinator.device.address, connectable=True
|
||||
)
|
||||
|
||||
return {
|
||||
"service_info": async_redact_data(
|
||||
service_info.as_dict() if service_info else None,
|
||||
SERVICE_INFO_TO_REDACT,
|
||||
),
|
||||
}
|
||||
@@ -12,6 +12,11 @@
|
||||
"resume": {
|
||||
"default": "mdi:play"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"dimming_time": {
|
||||
"default": "mdi:timer-outline"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -71,6 +71,7 @@ class CasperGlowLight(CasperGlowEntity, LightEntity):
|
||||
self._attr_color_mode = ColorMode.BRIGHTNESS
|
||||
if state.brightness_level is not None:
|
||||
self._attr_brightness = _device_pct_to_ha_brightness(state.brightness_level)
|
||||
self.coordinator.last_brightness_pct = state.brightness_level
|
||||
|
||||
@callback
|
||||
def _async_handle_state_update(self, state: GlowState) -> None:
|
||||
@@ -97,6 +98,7 @@ class CasperGlowLight(CasperGlowEntity, LightEntity):
|
||||
)
|
||||
)
|
||||
self._attr_brightness = _device_pct_to_ha_brightness(brightness_pct)
|
||||
self.coordinator.last_brightness_pct = brightness_pct
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the light off."""
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pycasperglow"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["pycasperglow==1.1.0"]
|
||||
"requirements": ["pycasperglow==1.2.0"]
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ rules:
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: No network discovery.
|
||||
@@ -52,14 +52,16 @@ rules:
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class: todo
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: Integration does not register repair issues.
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
|
||||
92
homeassistant/components/casper_glow/select.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""Casper Glow integration select platform for dimming time."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pycasperglow import GlowState
|
||||
|
||||
from homeassistant.components.select import SelectEntity
|
||||
from homeassistant.const import EntityCategory, UnitOfTime
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
|
||||
from .const import DIMMING_TIME_OPTIONS
|
||||
from .coordinator import CasperGlowConfigEntry, CasperGlowCoordinator
|
||||
from .entity import CasperGlowEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: CasperGlowConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the select platform for Casper Glow."""
|
||||
async_add_entities([CasperGlowDimmingTimeSelect(entry.runtime_data)])
|
||||
|
||||
|
||||
class CasperGlowDimmingTimeSelect(CasperGlowEntity, SelectEntity, RestoreEntity):
|
||||
"""Select entity for Casper Glow dimming time."""
|
||||
|
||||
_attr_translation_key = "dimming_time"
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
_attr_options = list(DIMMING_TIME_OPTIONS)
|
||||
_attr_unit_of_measurement = UnitOfTime.MINUTES
|
||||
|
||||
def __init__(self, coordinator: CasperGlowCoordinator) -> None:
|
||||
"""Initialize the dimming time select entity."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = f"{format_mac(coordinator.device.address)}_dimming_time"
|
||||
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Return the currently selected dimming time from the coordinator."""
|
||||
if self.coordinator.last_dimming_time_minutes is None:
|
||||
return None
|
||||
return str(self.coordinator.last_dimming_time_minutes)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last known dimming time and register state update callback."""
|
||||
await super().async_added_to_hass()
|
||||
if self.coordinator.last_dimming_time_minutes is None and (
|
||||
last_state := await self.async_get_last_state()
|
||||
):
|
||||
if last_state.state in DIMMING_TIME_OPTIONS:
|
||||
self.coordinator.last_dimming_time_minutes = int(last_state.state)
|
||||
self.async_on_remove(
|
||||
self._device.register_callback(self._async_handle_state_update)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_handle_state_update(self, state: GlowState) -> None:
|
||||
"""Handle a state update from the device."""
|
||||
if state.brightness_level is not None:
|
||||
self.coordinator.last_brightness_pct = state.brightness_level
|
||||
if (
|
||||
state.configured_dimming_time_minutes is not None
|
||||
and self.coordinator.last_dimming_time_minutes is None
|
||||
):
|
||||
self.coordinator.last_dimming_time_minutes = (
|
||||
state.configured_dimming_time_minutes
|
||||
)
|
||||
# Dimming time is not part of the device state
|
||||
# that is provided via BLE update. Therefore
|
||||
# we need to trigger a state update for the select entity
|
||||
# to update the current state.
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Set the dimming time."""
|
||||
await self._async_command(
|
||||
self._device.set_brightness_and_dimming_time(
|
||||
self.coordinator.last_brightness_pct, int(option)
|
||||
)
|
||||
)
|
||||
self.coordinator.last_dimming_time_minutes = int(option)
|
||||
# Dimming time is not part of the device state
|
||||
# that is provided via BLE update. Therefore
|
||||
# we need to trigger a state update for the select entity
|
||||
# to update the current state.
|
||||
self.async_write_ha_state()
|
||||
61
homeassistant/components/casper_glow/sensor.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""Casper Glow integration sensor platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pycasperglow import GlowState
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import CasperGlowConfigEntry, CasperGlowCoordinator
|
||||
from .entity import CasperGlowEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: CasperGlowConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the sensor platform for Casper Glow."""
|
||||
async_add_entities([CasperGlowBatterySensor(entry.runtime_data)])
|
||||
|
||||
|
||||
class CasperGlowBatterySensor(CasperGlowEntity, SensorEntity):
|
||||
"""Sensor entity for Casper Glow battery level."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.BATTERY
|
||||
_attr_native_unit_of_measurement = PERCENTAGE
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
|
||||
def __init__(self, coordinator: CasperGlowCoordinator) -> None:
|
||||
"""Initialize the battery sensor."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = f"{format_mac(coordinator.device.address)}_battery"
|
||||
if coordinator.device.state.battery_level is not None:
|
||||
self._attr_native_value = coordinator.device.state.battery_level.percentage
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register state update callback when entity is added."""
|
||||
await super().async_added_to_hass()
|
||||
self.async_on_remove(
|
||||
self._device.register_callback(self._async_handle_state_update)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _async_handle_state_update(self, state: GlowState) -> None:
|
||||
"""Handle a state update from the device."""
|
||||
if state.battery_level is not None:
|
||||
new_value = state.battery_level.percentage
|
||||
if new_value != self._attr_native_value:
|
||||
self._attr_native_value = new_value
|
||||
self.async_write_ha_state()
|
||||
@@ -39,6 +39,11 @@
      "resume": {
        "name": "Resume dimming"
      }
    },
    "select": {
      "dimming_time": {
        "name": "Dimming time"
      }
    }
  },
  "exceptions": {

@@ -39,9 +39,7 @@ class ChessConfigFlow(ConfigFlow, domain=DOMAIN):
            else:
                await self.async_set_unique_id(str(user.player_id))
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=user.name or user.username, data=user_input
                )
                return self.async_create_entry(title=user.name, data=user_input)

        return self.async_show_form(
            step_id="user",

@@ -112,7 +112,7 @@ class ComelitAlarmEntity(
    @property
    def available(self) -> bool:
        """Return True if alarm is available."""
        if self._area.human_status == AlarmAreaState.UNKNOWN:
        if self._area.human_status in [AlarmAreaState.ANOMALY, AlarmAreaState.UNKNOWN]:
            return False
        return super().available

@@ -151,7 +151,7 @@ class ComelitAlarmEntity(
        if code != str(self.coordinator.api.device_pin):
            return
        await self.coordinator.api.set_zone_status(
            self._area.index, ALARM_ACTIONS[DISABLE], self._area.anomaly
            self._area.index, ALARM_ACTIONS[DISABLE]
        )
        await self._async_update_state(
            AlarmAreaState.DISARMED, ALARM_AREA_ARMED_STATUS[DISABLE]
@@ -160,7 +160,7 @@ class ComelitAlarmEntity(
    async def async_alarm_arm_away(self, code: str | None = None) -> None:
        """Send arm away command."""
        await self.coordinator.api.set_zone_status(
            self._area.index, ALARM_ACTIONS[AWAY], self._area.anomaly
            self._area.index, ALARM_ACTIONS[AWAY]
        )
        await self._async_update_state(
            AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[AWAY]
@@ -169,7 +169,7 @@ class ComelitAlarmEntity(
    async def async_alarm_arm_home(self, code: str | None = None) -> None:
        """Send arm home command."""
        await self.coordinator.api.set_zone_status(
            self._area.index, ALARM_ACTIONS[HOME], self._area.anomaly
            self._area.index, ALARM_ACTIONS[HOME]
        )
        await self._async_update_state(
            AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[HOME_P1]
@@ -178,7 +178,7 @@ class ComelitAlarmEntity(
    async def async_alarm_arm_night(self, code: str | None = None) -> None:
        """Send arm night command."""
        await self.coordinator.api.set_zone_status(
            self._area.index, ALARM_ACTIONS[NIGHT], self._area.anomaly
            self._area.index, ALARM_ACTIONS[NIGHT]
        )
        await self._async_update_state(
            AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[NIGHT]

@@ -9,7 +9,6 @@ from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import CLIMATE

from homeassistant.components.climate import (
    DOMAIN as CLIMATE_DOMAIN,
    ClimateEntity,
    ClimateEntityFeature,
    HVACAction,
@@ -92,7 +91,7 @@ async def async_setup_entry(

    entities: list[ClimateEntity] = []
    for device in coordinator.data[CLIMATE].values():
        values = load_api_data(device, CLIMATE_DOMAIN)
        values = load_api_data(device, "climate")
        if values[0] == 0 and values[4] == 0:
            # No climate data, device is only a humidifier/dehumidifier

@@ -140,7 +139,7 @@ class ComelitClimateEntity(ComelitBridgeBaseEntity, ClimateEntity):
    def _update_attributes(self) -> None:
        """Update class attributes."""
        device = self.coordinator.data[CLIMATE][self._device.index]
        values = load_api_data(device, CLIMATE_DOMAIN)
        values = load_api_data(device, "climate")

        _active = values[1]
        _mode = values[2]  # Values from API: "O", "L", "U"

@@ -9,7 +9,6 @@ from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import CLIMATE

from homeassistant.components.humidifier import (
    DOMAIN as HUMIDIFIER_DOMAIN,
    MODE_AUTO,
    MODE_NORMAL,
    HumidifierAction,
@@ -68,7 +67,7 @@ async def async_setup_entry(

    entities: list[ComelitHumidifierEntity] = []
    for device in coordinator.data[CLIMATE].values():
        values = load_api_data(device, HUMIDIFIER_DOMAIN)
        values = load_api_data(device, "humidifier")
        if values[0] == 0 and values[4] == 0:
            # No humidity data, device is only a climate

@@ -142,7 +141,7 @@ class ComelitHumidifierEntity(ComelitBridgeBaseEntity, HumidifierEntity):
    def _update_attributes(self) -> None:
        """Update class attributes."""
        device = self.coordinator.data[CLIMATE][self._device.index]
        values = load_api_data(device, HUMIDIFIER_DOMAIN)
        values = load_api_data(device, "humidifier")

        _active = values[1]
        _mode = values[2]  # Values from API: "O", "L", "U"

@@ -8,5 +8,5 @@
  "iot_class": "local_polling",
  "loggers": ["aiocomelit"],
  "quality_scale": "platinum",
  "requirements": ["aiocomelit==2.0.2"]
  "requirements": ["aiocomelit==2.0.1"]
}

@@ -113,9 +113,6 @@
    "humidity_while_off": {
      "message": "Cannot change humidity while off"
    },
    "invalid_clima_data": {
      "message": "Invalid 'clima' data"
    },
    "update_failed": {
      "message": "Failed to update data: {error}"
    }

@@ -2,13 +2,12 @@

from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import TYPE_CHECKING, Any, Concatenate
from typing import TYPE_CHECKING, Any, Concatenate, Literal

from aiocomelit.api import ComelitSerialBridgeObject
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
from aiohttp import ClientSession, CookieJar

from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
@@ -30,17 +29,19 @@ async def async_client_session(hass: HomeAssistant) -> ClientSession:
    )


def load_api_data(device: ComelitSerialBridgeObject, domain: str) -> list[Any]:
def load_api_data(
    device: ComelitSerialBridgeObject,
    domain: Literal["climate", "humidifier"],
) -> list[Any]:
    """Load data from the API."""
    # This function is called when the data is loaded from the API
    if not isinstance(device.val, list):
        raise HomeAssistantError(
            translation_domain=domain, translation_key="invalid_clima_data"
        )
    # This function is called when the data is loaded from the API.
    # For climate and humidifier device.val is always a list.
    if TYPE_CHECKING:
        assert isinstance(device.val, list)
    # CLIMATE has a 2 item tuple:
    # - first for Clima
    # - second for Humidifier
    return device.val[0] if domain == CLIMATE_DOMAIN else device.val[1]
    return device.val[0] if domain == "climate" else device.val[1]


async def cleanup_stale_entity(
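For reference, the CLIMATE payload handled by load_api_data is a two-item list: the first item carries the clima values and the second the humidifier values, and callers then read positional fields such as values[1] (the active flag) and values[2] (the API mode, "O"/"L"/"U"). A small illustrative sketch of that selection, using a stand-in object and made-up numbers rather than real bridge data:

from types import SimpleNamespace

# Stand-in for a ComelitSerialBridgeObject; only the ``val`` attribute matters here.
device = SimpleNamespace(
    val=[
        [211, 1, "O", 0, 215, 1, "O", 0],  # hypothetical clima values
        [47, 0, "L", 0, 50, 0, "L", 0],    # hypothetical humidifier values
    ]
)

# Mirrors the selection performed by load_api_data(device, "climate") and
# load_api_data(device, "humidifier") respectively.
clima_values = device.val[0]
humidifier_values = device.val[1]

# _update_attributes() then reads positional fields from the selected list.
assert clima_values[1] == 1    # active flag
assert clima_values[2] == "O"  # API mode: "O", "L" or "U"
assert humidifier_values[4] == 50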
@@ -210,7 +210,7 @@ def websocket_update_entity(
        )
        return

    changes = {}
    changes: dict[str, Any] = {}

    for key in (
        "area_id",

@@ -44,18 +44,18 @@ class DemoRemote(RemoteEntity):
            return {"last_command_sent": self._last_command_sent}
        return None

    def turn_on(self, **kwargs: Any) -> None:
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the remote on."""
        self._attr_is_on = True
        self.schedule_update_ha_state()
        self.async_write_ha_state()

    def turn_off(self, **kwargs: Any) -> None:
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the remote off."""
        self._attr_is_on = False
        self.schedule_update_ha_state()
        self.async_write_ha_state()

    def send_command(self, command: Iterable[str], **kwargs: Any) -> None:
    async def async_send_command(self, command: Iterable[str], **kwargs: Any) -> None:
        """Send a command to a device."""
        for com in command:
            self._last_command_sent = com
        self.schedule_update_ha_state()
        self.async_write_ha_state()

@@ -61,12 +61,12 @@ class DemoSwitch(SwitchEntity):
            name=device_name,
        )

    def turn_on(self, **kwargs: Any) -> None:
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the switch on."""
        self._attr_is_on = True
        self.schedule_update_ha_state()
        self.async_write_ha_state()

    def turn_off(self, **kwargs: Any) -> None:
        """Turn the device off."""
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off."""
        self._attr_is_on = False
        self.schedule_update_ha_state()
        self.async_write_ha_state()
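The Demo remote and switch hunks above contrast the synchronous handler pattern (def turn_on plus the thread-safe schedule_update_ha_state, for code running in an executor thread) with the async pattern (async def async_turn_on plus async_write_ha_state, which must run on the event loop). A generic sketch of both variants side by side, for a hypothetical entity that is not part of this diff:

from typing import Any

from homeassistant.components.switch import SwitchEntity


class ExampleSwitch(SwitchEntity):
    """Hypothetical switch showing the sync and async state-write patterns."""

    _attr_is_on = False

    # Sync variant: runs in an executor thread, so it schedules the state
    # write back onto the event loop.
    def turn_on(self, **kwargs: Any) -> None:
        """Turn the switch on (sync variant)."""
        self._attr_is_on = True
        self.schedule_update_ha_state()

    # Async variant: already runs on the event loop, so it can write the new
    # state immediately.
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off (async variant)."""
        self._attr_is_on = False
        self.async_write_ha_state()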
@@ -1 +1 @@
"""The denon component."""
"""The Denon Network Receivers integration."""

@@ -1,4 +1,4 @@
"""The denonavr component."""
"""The Denon AVR Network Receivers integration."""

import logging


@@ -21,6 +21,7 @@ from .const import ( # noqa: F401
    ATTR_DEV_ID,
    ATTR_GPS,
    ATTR_HOST_NAME,
    ATTR_IN_ZONES,
    ATTR_IP,
    ATTR_LOCATION_NAME,
    ATTR_MAC,

@@ -3,7 +3,7 @@
from __future__ import annotations

import asyncio
from typing import final
from typing import Any, final

from propcache.api import cached_property

@@ -18,7 +18,7 @@ from homeassistant.const import (
    STATE_NOT_HOME,
    EntityCategory,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.core import Event, HomeAssistant, State, callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.device_registry import (
    DeviceInfo,
@@ -33,6 +33,7 @@ from homeassistant.util.hass_dict import HassKey

from .const import (
    ATTR_HOST_NAME,
    ATTR_IN_ZONES,
    ATTR_IP,
    ATTR_MAC,
    ATTR_SOURCE_TYPE,
@@ -223,6 +224,9 @@ class TrackerEntity(
    _attr_longitude: float | None = None
    _attr_source_type: SourceType = SourceType.GPS

    __active_zone: State | None = None
    __in_zones: list[str] | None = None

    @cached_property
    def should_poll(self) -> bool:
        """No polling for entities that have location pushed."""
@@ -256,6 +260,18 @@
        """Return longitude value of the device."""
        return self._attr_longitude

    @callback
    def _async_write_ha_state(self) -> None:
        """Calculate active zones."""
        if self.latitude is not None and self.longitude is not None:
            self.__active_zone, self.__in_zones = zone.async_in_zones(
                self.hass, self.latitude, self.longitude, self.location_accuracy
            )
        else:
            self.__active_zone = None
            self.__in_zones = None
        super()._async_write_ha_state()

    @property
    def state(self) -> str | None:
        """Return the state of the device."""
@@ -263,9 +279,7 @@
            return self.location_name

        if self.latitude is not None and self.longitude is not None:
            zone_state = zone.async_active_zone(
                self.hass, self.latitude, self.longitude, self.location_accuracy
            )
            zone_state = self.__active_zone
            if zone_state is None:
                state = STATE_NOT_HOME
            elif zone_state.entity_id == zone.ENTITY_ID_HOME:
@@ -278,12 +292,13 @@

    @final
    @property
    def state_attributes(self) -> dict[str, StateType]:
    def state_attributes(self) -> dict[str, Any]:
        """Return the device state attributes."""
        attr: dict[str, StateType] = {}
        attr: dict[str, Any] = {ATTR_IN_ZONES: []}
        attr.update(super().state_attributes)

        if self.latitude is not None and self.longitude is not None:
            attr[ATTR_IN_ZONES] = self.__in_zones or []
            attr[ATTR_LATITUDE] = self.latitude
            attr[ATTR_LONGITUDE] = self.longitude
            attr[ATTR_GPS_ACCURACY] = self.location_accuracy
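With the TrackerEntity changes above, the active zone and the full in_zones list are computed once in _async_write_ha_state and cached, instead of the state property recomputing the active zone on every read; the in_zones value is then exposed as a state attribute. As a hypothetical illustration (the entity below is not part of this diff, and the attribute names beyond those shown above are assumptions), a GPS tracker only has to push coordinates and write state:

from homeassistant.components.device_tracker import TrackerEntity
from homeassistant.core import callback


class ExampleTracker(TrackerEntity):
    """Hypothetical GPS tracker; zone handling is inherited from TrackerEntity."""

    _attr_name = "Example tracker"

    @callback
    def handle_location(self, latitude: float, longitude: float, accuracy: int) -> None:
        """Store a pushed location fix and write state."""
        self._attr_latitude = latitude
        self._attr_longitude = longitude
        self._attr_location_accuracy = accuracy
        # The overridden _async_write_ha_state() resolves the active zone and
        # the in_zones list before the state is written, so the entity state
        # and its "in_zones" attribute stay consistent.
        self.async_write_ha_state()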
@@ -43,6 +43,7 @@ ATTR_BATTERY: Final = "battery"
ATTR_DEV_ID: Final = "dev_id"
ATTR_GPS: Final = "gps"
ATTR_HOST_NAME: Final = "host_name"
ATTR_IN_ZONES: Final = "in_zones"
ATTR_LOCATION_NAME: Final = "location_name"
ATTR_MAC: Final = "mac"
ATTR_SOURCE_TYPE: Final = "source_type"

@@ -24,8 +24,14 @@
    "entered_home": {
      "trigger": "mdi:account-arrow-left"
    },
    "entered_zone": {
      "trigger": "mdi:map-marker-plus"
    },
    "left_home": {
      "trigger": "mdi:account-arrow-right"
    },
    "left_zone": {
      "trigger": "mdi:map-marker-minus"
    }
  }
}

@@ -130,6 +130,19 @@
      },
      "name": "Entered home"
    },
    "entered_zone": {
      "description": "Triggers when one or more device trackers enter a zone.",
      "fields": {
        "behavior": {
          "name": "[%key:component::device_tracker::common::trigger_behavior_name%]"
        },
        "zone": {
          "description": "The zones to trigger on.",
          "name": "Zones"
        }
      },
      "name": "Entered zone"
    },
    "left_home": {
      "description": "Triggers when one or more device trackers leave home.",
      "fields": {
@@ -138,6 +151,19 @@
        }
      },
      "name": "Left home"
    },
    "left_zone": {
      "description": "Triggers when one or more device trackers leave a zone.",
      "fields": {
        "behavior": {
          "name": "[%key:component::device_tracker::common::trigger_behavior_name%]"
        },
        "zone": {
          "description": "The zones to trigger on.",
          "name": "Zones"
        }
      },
      "name": "Left zone"
    }
  }
}

@@ -1,18 +1,93 @@
"""Provides triggers for device_trackers."""

from homeassistant.const import STATE_HOME
from homeassistant.core import HomeAssistant
import voluptuous as vol

from homeassistant.const import (
    CONF_OPTIONS,
    CONF_ZONE,
    STATE_HOME,
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import DomainSpec
from homeassistant.helpers.trigger import (
    ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
    EntityTriggerBase,
    Trigger,
    TriggerConfig,
    make_entity_origin_state_trigger,
    make_entity_target_state_trigger,
)

from .const import DOMAIN
from .const import ATTR_IN_ZONES, DOMAIN

ZONE_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
    {
        vol.Required(CONF_OPTIONS): {
            vol.Required(CONF_ZONE): vol.All(
                cv.ensure_list,
                vol.Length(min=1),
                [cv.entity_domain("zone")],
            ),
        },
    }
)

_IN_ZONES_SPEC = {DOMAIN: DomainSpec(value_source=ATTR_IN_ZONES)}


class ZoneTriggerBase(EntityTriggerBase):
    """Base for zone-based device tracker triggers."""

    _domain_specs = _IN_ZONES_SPEC
    _schema = ZONE_TRIGGER_SCHEMA

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the trigger."""
        super().__init__(hass, config)
        self._zones: set[str] = set(self._options[CONF_ZONE])

    def _is_valid(self, state: State) -> bool:
        """Check if the state is valid (not unavailable/unknown)."""
        return state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)

    def _in_target_zones(self, state: State) -> bool:
        """Check if the device is in any of the selected zones."""
        in_zones = set(self._get_tracked_value(state) or [])
        return bool(in_zones.intersection(self._zones))


class EnteredZoneTrigger(ZoneTriggerBase):
    """Trigger when a device tracker enters one of the selected zones."""

    def is_valid_transition(self, from_state: State, to_state: State) -> bool:
        """Check that the device was not already in any of the selected zones."""
        return self._is_valid(from_state) and not self._in_target_zones(from_state)

    def is_valid_state(self, state: State) -> bool:
        """Check that the device is now in at least one of the selected zones."""
        return self._is_valid(state) and self._in_target_zones(state)


class LeftZoneTrigger(ZoneTriggerBase):
    """Trigger when a device tracker leaves all of the selected zones."""

    def is_valid_transition(self, from_state: State, to_state: State) -> bool:
        """Check that the device was previously in at least one of the selected zones."""
        return self._is_valid(from_state) and self._in_target_zones(from_state)

    def is_valid_state(self, state: State) -> bool:
        """Check that the device is no longer in any of the selected zones."""
        return self._is_valid(state) and not self._in_target_zones(state)


TRIGGERS: dict[str, type[Trigger]] = {
    "entered_home": make_entity_target_state_trigger(DOMAIN, STATE_HOME),
    "entered_zone": EnteredZoneTrigger,
    "left_home": make_entity_origin_state_trigger(DOMAIN, from_state=STATE_HOME),
    "left_zone": LeftZoneTrigger,
}
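The enter/leave semantics above reduce to set intersections over the tracked in_zones value: entering fires when the intersection with the configured zones goes from empty to non-empty, and leaving fires on the opposite transition (with an additional unavailable/unknown guard handled by _is_valid). A small standalone sketch of just that decision logic, independent of the trigger classes and using made-up zone names:

def entered(configured: set[str], before: list[str], after: list[str]) -> bool:
    """True when the tracker moves from none of the configured zones into at least one."""
    return not (set(before) & configured) and bool(set(after) & configured)


def left(configured: set[str], before: list[str], after: list[str]) -> bool:
    """True when the tracker moves from at least one configured zone to none of them."""
    return bool(set(before) & configured) and not (set(after) & configured)


zones = {"zone.home_street", "zone.school"}
assert entered(zones, before=[], after=["zone.school"])
assert not entered(zones, before=["zone.home_street"], after=["zone.school"])
assert left(zones, before=["zone.school"], after=[])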
Some files were not shown because too many files have changed in this diff.