forked from home-assistant/core
Compare commits: master...via_device (380 commits)

.github/workflows/builder.yml (8 changed lines)

@@ -94,7 +94,7 @@ jobs:

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
        uses: dawidd6/action-download-artifact@v9
        uses: dawidd6/action-download-artifact@v10
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend

@@ -105,7 +105,7 @@ jobs:

      - name: Download nightly wheels of intents
        if: needs.init.outputs.channel == 'dev'
        uses: dawidd6/action-download-artifact@v9
        uses: dawidd6/action-download-artifact@v10
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/intents-package

@@ -509,7 +509,7 @@ jobs:
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Docker image
        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          context: . # So action will not pull the repository again
          file: ./script/hassfest/docker/Dockerfile

@@ -522,7 +522,7 @@ jobs:
      - name: Push Docker image
        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
        id: push
        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          context: . # So action will not pull the repository again
          file: ./script/hassfest/docker/Dockerfile

.github/workflows/ci.yaml (4 changed lines)

@@ -40,7 +40,7 @@ env:
  CACHE_VERSION: 2
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2025.6"
  HA_SHORT_VERSION: "2025.7"
  DEFAULT_PYTHON: "3.13"
  ALL_PYTHON_VERSIONS: "['3.13']"
  # 10.3 is the oldest supported version

@@ -360,7 +360,7 @@ jobs:
      - name: Run ruff
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
          pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github

.github/workflows/codeql.yml (4 changed lines)

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@v4.2.2

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3.28.18
        uses: github/codeql-action/init@v3.28.19
        with:
          languages: python

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3.28.18
        uses: github/codeql-action/analyze@v3.28.19
        with:
          category: "/language:python"

.github/workflows/detect-duplicate-issues.yml (new file, 374 lines)

@@ -0,0 +1,374 @@
name: Auto-detect duplicate issues

# yamllint disable-line rule:truthy
on:
  issues:
    types: [labeled]

permissions:
  issues: write
  models: read

jobs:
  detect-duplicates:
    runs-on: ubuntu-latest

    steps:
      - name: Check if integration label was added and extract details
        id: extract
        uses: actions/github-script@v7.0.1
        with:
          script: |
            // Debug: Log the event payload
            console.log('Event name:', context.eventName);
            console.log('Event action:', context.payload.action);
            console.log('Event payload keys:', Object.keys(context.payload));

            // Check the specific label that was added
            const addedLabel = context.payload.label;
            if (!addedLabel) {
              console.log('No label found in labeled event payload');
              core.setOutput('should_continue', 'false');
              return;
            }

            console.log(`Label added: ${addedLabel.name}`);

            if (!addedLabel.name.startsWith('integration:')) {
              console.log('Added label is not an integration label, skipping duplicate detection');
              core.setOutput('should_continue', 'false');
              return;
            }

            console.log(`Integration label added: ${addedLabel.name}`);

            let currentIssue;
            let integrationLabels = [];

            try {
              const issue = await github.rest.issues.get({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.issue.number
              });

              currentIssue = issue.data;

              // Check if potential-duplicate label already exists
              const hasPotentialDuplicateLabel = currentIssue.labels
                .some(label => label.name === 'potential-duplicate');

              if (hasPotentialDuplicateLabel) {
                console.log('Issue already has potential-duplicate label, skipping duplicate detection');
                core.setOutput('should_continue', 'false');
                return;
              }

              integrationLabels = currentIssue.labels
                .filter(label => label.name.startsWith('integration:'))
                .map(label => label.name);
            } catch (error) {
              core.error(`Failed to fetch issue #${context.payload.issue.number}:`, error.message);
              core.setOutput('should_continue', 'false');
              return;
            }

            // Check if we've already posted a duplicate detection comment recently
            let comments;
            try {
              comments = await github.rest.issues.listComments({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.issue.number,
                per_page: 10
              });
            } catch (error) {
              core.error('Failed to fetch comments:', error.message);
              // Continue anyway, worst case we might post a duplicate comment
              comments = { data: [] };
            }

            // Check if we've already posted a duplicate detection comment
            const recentDuplicateComment = comments.data.find(comment =>
              comment.user && comment.user.login === 'github-actions[bot]' &&
              comment.body.includes('<!-- workflow: detect-duplicate-issues -->')
            );

            if (recentDuplicateComment) {
              console.log('Already posted duplicate detection comment, skipping');
              core.setOutput('should_continue', 'false');
              return;
            }

            core.setOutput('should_continue', 'true');
            core.setOutput('current_number', currentIssue.number);
            core.setOutput('current_title', currentIssue.title);
            core.setOutput('current_body', currentIssue.body);
            core.setOutput('current_url', currentIssue.html_url);
            core.setOutput('integration_labels', JSON.stringify(integrationLabels));

            console.log(`Current issue: #${currentIssue.number}`);
            console.log(`Integration labels: ${integrationLabels.join(', ')}`);

      - name: Fetch similar issues
        id: fetch_similar
        if: steps.extract.outputs.should_continue == 'true'
        uses: actions/github-script@v7.0.1
        env:
          INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
          CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}
        with:
          script: |
            const integrationLabels = JSON.parse(process.env.INTEGRATION_LABELS);
            const currentNumber = parseInt(process.env.CURRENT_NUMBER);

            if (integrationLabels.length === 0) {
              console.log('No integration labels found, skipping duplicate detection');
              core.setOutput('has_similar', 'false');
              return;
            }

            // Use GitHub search API to find issues with matching integration labels
            console.log(`Searching for issues with integration labels: ${integrationLabels.join(', ')}`);

            // Build search query for issues with any of the current integration labels
            const labelQueries = integrationLabels.map(label => `label:"${label}"`);
            let searchQuery;

            if (labelQueries.length === 1) {
              searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue ${labelQueries[0]}`;
            } else {
              searchQuery = `repo:${context.repo.owner}/${context.repo.repo} is:issue (${labelQueries.join(' OR ')})`;
            }

            console.log(`Search query: ${searchQuery}`);

            let result;
            try {
              result = await github.rest.search.issuesAndPullRequests({
                q: searchQuery,
                per_page: 15,
                sort: 'updated',
                order: 'desc'
              });
            } catch (error) {
              core.error('Failed to search for similar issues:', error.message);
              if (error.status === 403 && error.message.includes('rate limit')) {
                core.error('GitHub API rate limit exceeded');
              }
              core.setOutput('has_similar', 'false');
              return;
            }

            // Filter out the current issue, pull requests, and newer issues (higher numbers)
            const similarIssues = result.data.items
              .filter(item =>
                item.number !== currentNumber &&
                !item.pull_request &&
                item.number < currentNumber // Only include older issues (lower numbers)
              )
              .map(item => ({
                number: item.number,
                title: item.title,
                body: item.body,
                url: item.html_url,
                state: item.state,
                createdAt: item.created_at,
                updatedAt: item.updated_at,
                comments: item.comments,
                labels: item.labels.map(l => l.name)
              }));

            console.log(`Found ${similarIssues.length} issues with matching integration labels`);
            console.log('Raw similar issues:', JSON.stringify(similarIssues.slice(0, 3), null, 2));

            if (similarIssues.length === 0) {
              console.log('No similar issues found, setting has_similar to false');
              core.setOutput('has_similar', 'false');
              return;
            }

            console.log('Similar issues found, setting has_similar to true');
            core.setOutput('has_similar', 'true');

            // Clean the issue data to prevent JSON parsing issues
            const cleanedIssues = similarIssues.slice(0, 15).map(item => {
              // Handle body with improved truncation and null handling
              let cleanBody = '';
              if (item.body && typeof item.body === 'string') {
                // Remove control characters
                const cleaned = item.body.replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
                // Truncate to 1000 characters and add ellipsis if needed
                cleanBody = cleaned.length > 1000
                  ? cleaned.substring(0, 1000) + '...'
                  : cleaned;
              }

              return {
                number: item.number,
                title: item.title.replace(/[\u0000-\u001F\u007F-\u009F]/g, ''), // Remove control characters
                body: cleanBody,
                url: item.url,
                state: item.state,
                createdAt: item.createdAt,
                updatedAt: item.updatedAt,
                comments: item.comments,
                labels: item.labels
              };
            });

            console.log(`Cleaned issues count: ${cleanedIssues.length}`);
            console.log('First cleaned issue:', JSON.stringify(cleanedIssues[0], null, 2));

            core.setOutput('similar_issues', JSON.stringify(cleanedIssues));

      - name: Detect duplicates using AI
        id: ai_detection
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
        uses: actions/ai-inference@v1.1.0
        with:
          model: openai/gpt-4o-mini
          system-prompt: |
            You are a Home Assistant issue duplicate detector. Your task is to identify potential duplicate issues based on their content.

            Important considerations:
            - Open issues are more relevant than closed ones for duplicate detection
            - Recently updated issues may indicate ongoing work or discussion
            - Issues with more comments are generally more relevant and active
            - Higher comment count often indicates community engagement and importance
            - Older closed issues might be resolved differently than newer approaches
            - Consider the time between issues - very old issues may have different contexts

            Rules:
            1. Compare the current issue with the provided similar issues
            2. Look for issues that report the same problem or request the same functionality
            3. Consider different wording but same underlying issue as duplicates
            4. For CLOSED issues, only mark as duplicate if they describe the EXACT same problem
            5. For OPEN issues, use a lower threshold (70%+ similarity)
            6. Prioritize issues with higher comment counts as they indicate more activity/relevance
            7. Return ONLY a JSON array of issue numbers that are potential duplicates
            8. If no duplicates are found, return an empty array: []
            9. Maximum 5 potential duplicates, prioritize open issues with comments
            10. Consider the age of issues - prefer recent duplicates over very old ones

            Example response format:
            [1234, 5678, 9012]

          prompt: |
            Current issue (just created):
            Title: ${{ steps.extract.outputs.current_title }}
            Body: ${{ steps.extract.outputs.current_body }}

            Similar issues to compare against (each includes state, creation date, last update, and comment count):
            ${{ steps.fetch_similar.outputs.similar_issues }}

            Analyze these issues and identify which ones are potential duplicates of the current issue. Consider their state (open/closed), how recently they were updated, and their comment count (higher = more relevant).

          max-tokens: 100

      - name: Post duplicate detection results
        id: post_results
        if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
        uses: actions/github-script@v7.0.1
        env:
          AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
          SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}
        with:
          script: |
            const aiResponse = process.env.AI_RESPONSE;

            console.log('Raw AI response:', JSON.stringify(aiResponse));

            let duplicateNumbers = [];
            try {
              // Clean the response of any potential control characters
              const cleanResponse = aiResponse.trim().replace(/[\u0000-\u001F\u007F-\u009F]/g, '');
              console.log('Cleaned AI response:', cleanResponse);

              duplicateNumbers = JSON.parse(cleanResponse);

              // Ensure it's an array and contains only numbers
              if (!Array.isArray(duplicateNumbers)) {
                console.log('AI response is not an array, trying to extract numbers');
                const numberMatches = cleanResponse.match(/\d+/g);
                duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
              }

              // Filter to only valid numbers
              duplicateNumbers = duplicateNumbers.filter(n => typeof n === 'number' && !isNaN(n));

            } catch (error) {
              console.log('Failed to parse AI response as JSON:', error.message);
              console.log('Raw response:', aiResponse);

              // Fallback: try to extract numbers from the response
              const numberMatches = aiResponse.match(/\d+/g);
              duplicateNumbers = numberMatches ? numberMatches.map(n => parseInt(n)) : [];
              console.log('Extracted numbers as fallback:', duplicateNumbers);
            }

            if (!Array.isArray(duplicateNumbers) || duplicateNumbers.length === 0) {
              console.log('No duplicates detected by AI');
              return;
            }

            console.log(`AI detected ${duplicateNumbers.length} potential duplicates: ${duplicateNumbers.join(', ')}`);

            // Get details of detected duplicates
            const similarIssues = JSON.parse(process.env.SIMILAR_ISSUES);
            const duplicates = similarIssues.filter(issue => duplicateNumbers.includes(issue.number));

            if (duplicates.length === 0) {
              console.log('No matching issues found for detected numbers');
              return;
            }

            // Create comment with duplicate detection results
            const duplicateLinks = duplicates.map(issue => `- [#${issue.number}: ${issue.title}](${issue.url})`).join('\n');

            const commentBody = [
              '<!-- workflow: detect-duplicate-issues -->',
              '### 🔍 **Potential duplicate detection**',
              '',
              'I\'ve analyzed similar issues and found the following potential duplicates:',
              '',
              duplicateLinks,
              '',
              '**What to do next:**',
              '1. Please review these issues to see if they match your issue',
              '2. If you find an existing issue that covers your problem:',
              ' - Consider closing this issue',
              ' - Add your findings or 👍 on the existing issue instead',
              '3. If your issue is different or adds new aspects, please clarify how it differs',
              '',
              'This helps keep our issues organized and ensures similar issues are consolidated for better visibility.',
              '',
              '*This message was generated automatically by our duplicate detection system.*'
            ].join('\n');

            try {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.issue.number,
                body: commentBody
              });

              console.log(`Posted duplicate detection comment with ${duplicates.length} potential duplicates`);

              // Add the potential-duplicate label
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.payload.issue.number,
                labels: ['potential-duplicate']
              });

              console.log('Added potential-duplicate label to the issue');
            } catch (error) {
              core.error('Failed to post duplicate detection comment or add label:', error.message);
              if (error.status === 403) {
                core.error('Permission denied or rate limit exceeded');
              }
              // Don't throw - we've done the analysis, just couldn't post the result
            }

.github/workflows/detect-non-english-issues.yml (new file, 184 lines)

@@ -0,0 +1,184 @@
name: Auto-detect non-English issues

# yamllint disable-line rule:truthy
on:
  issues:
    types: [opened]

permissions:
  issues: write
  models: read

jobs:
  detect-language:
    runs-on: ubuntu-latest

    steps:
      - name: Check issue language
        id: detect_language
        uses: actions/github-script@v7.0.1
        env:
          ISSUE_NUMBER: ${{ github.event.issue.number }}
          ISSUE_TITLE: ${{ github.event.issue.title }}
          ISSUE_BODY: ${{ github.event.issue.body }}
          ISSUE_USER_TYPE: ${{ github.event.issue.user.type }}
        with:
          script: |
            // Get the issue details from environment variables
            const issueNumber = process.env.ISSUE_NUMBER;
            const issueTitle = process.env.ISSUE_TITLE || '';
            const issueBody = process.env.ISSUE_BODY || '';
            const userType = process.env.ISSUE_USER_TYPE;

            // Skip language detection for bot users
            if (userType === 'Bot') {
              console.log('Skipping language detection for bot user');
              core.setOutput('should_continue', 'false');
              return;
            }

            console.log(`Checking language for issue #${issueNumber}`);
            console.log(`Title: ${issueTitle}`);

            // Combine title and body for language detection
            const fullText = `${issueTitle}\n\n${issueBody}`;

            // Check if the text is too short to reliably detect language
            if (fullText.trim().length < 20) {
              console.log('Text too short for reliable language detection');
              core.setOutput('should_continue', 'false'); // Skip processing for very short text
              return;
            }

            core.setOutput('issue_number', issueNumber);
            core.setOutput('issue_text', fullText);
            core.setOutput('should_continue', 'true');

      - name: Detect language using AI
        id: ai_language_detection
        if: steps.detect_language.outputs.should_continue == 'true'
        uses: actions/ai-inference@v1.1.0
        with:
          model: openai/gpt-4o-mini
          system-prompt: |
            You are a language detection system. Your task is to determine if the provided text is written in English or another language.

            Rules:
            1. Analyze the text and determine the primary language
            2. IGNORE markdown headers (lines starting with #, ##, ###, etc.) as these are from issue templates, not user input
            3. IGNORE all code blocks (text between ``` or ` markers) as they may contain system-generated error messages in other languages
            4. Consider technical terms, code snippets, and URLs as neutral (they don't indicate non-English)
            5. Focus on the actual sentences and descriptions written by the user
            6. Return ONLY a JSON object with two fields:
            - "is_english": boolean (true if the text is primarily in English, false otherwise)
            - "detected_language": string (the name of the detected language, e.g., "English", "Spanish", "Chinese", etc.)
            7. Be lenient - if the text is mostly English with minor non-English elements, consider it English
            8. Common programming terms, error messages, and technical jargon should not be considered as non-English

            Example response:
            {"is_english": false, "detected_language": "Spanish"}

          prompt: |
            Please analyze the following issue text and determine if it is written in English:

            ${{ steps.detect_language.outputs.issue_text }}

          max-tokens: 50

      - name: Process non-English issues
        if: steps.detect_language.outputs.should_continue == 'true'
        uses: actions/github-script@v7.0.1
        env:
          AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
          ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
        with:
          script: |
            const issueNumber = parseInt(process.env.ISSUE_NUMBER);
            const aiResponse = process.env.AI_RESPONSE;

            console.log('AI language detection response:', aiResponse);

            let languageResult;
            try {
              languageResult = JSON.parse(aiResponse.trim());

              // Validate the response structure
              if (!languageResult || typeof languageResult.is_english !== 'boolean') {
                throw new Error('Invalid response structure');
              }
            } catch (error) {
              core.error(`Failed to parse AI response: ${error.message}`);
              console.log('Raw AI response:', aiResponse);

              // Log more details for debugging
              core.warning('Defaulting to English due to parsing error');

              // Default to English if we can't parse the response
              return;
            }

            if (languageResult.is_english) {
              console.log('Issue is in English, no action needed');
              return;
            }

            console.log(`Issue detected as non-English: ${languageResult.detected_language}`);

            // Post comment explaining the language requirement
            const commentBody = [
              '<!-- workflow: detect-non-english-issues -->',
              '### 🌐 Non-English issue detected',
              '',
              `This issue appears to be written in **${languageResult.detected_language}** rather than English.`,
              '',
              'The Home Assistant project uses English as the primary language for issues to ensure that everyone in our international community can participate and help resolve issues. This allows any of our thousands of contributors to jump in and provide assistance.',
              '',
              '**What to do:**',
              '1. Re-create the issue using the English language',
              '2. If you need help with translation, consider using:',
              ' - Translation tools like Google Translate',
              ' - AI assistants like ChatGPT or Claude',
              '',
              'This helps our community provide the best possible support and ensures your issue gets the attention it deserves from our global contributor base.',
              '',
              'Thank you for your understanding! 🙏'
            ].join('\n');

            try {
              // Add comment
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                body: commentBody
              });

              console.log('Posted language requirement comment');

              // Add non-english label
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                labels: ['non-english']
              });

              console.log('Added non-english label');

              // Close the issue
              await github.rest.issues.update({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issueNumber,
                state: 'closed',
                state_reason: 'not_planned'
              });

              console.log('Closed the issue');

            } catch (error) {
              core.error('Failed to process non-English issue:', error.message);
              if (error.status === 403) {
                core.error('Permission denied or rate limit exceeded');
              }
            }

@@ -1,8 +1,8 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.0
    rev: v0.11.12
    hooks:
      - id: ruff
      - id: ruff-check
        args:
          - --fix
      - id: ruff-format

@@ -30,7 +30,7 @@ repos:
          - --branch=master
          - --branch=rc
  - repo: https://github.com/adrienverge/yamllint.git
    rev: v1.35.1
    rev: v1.37.1
    hooks:
      - id: yamllint
  - repo: https://github.com/pre-commit/mirrors-prettier

.vscode/tasks.json (2 changed lines)

@@ -45,7 +45,7 @@
    {
      "label": "Ruff",
      "type": "shell",
      "command": "pre-commit run ruff --all-files",
      "command": "pre-commit run ruff-check --all-files",
      "group": {
        "kind": "test",
        "isDefault": true

@@ -1,29 +0,0 @@
"""Enum backports from standard lib.

This file contained the backport of the StrEnum of Python 3.11.

Since we have dropped support for Python 3.10, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""

from __future__ import annotations

from enum import StrEnum as _StrEnum
from functools import partial

from homeassistant.helpers.deprecation import (
    DeprecatedAlias,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

# StrEnum deprecated as of 2024.5 use enum.StrEnum instead.
_DEPRECATED_StrEnum = DeprecatedAlias(_StrEnum, "enum.StrEnum", "2025.5")

__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
@@ -1,31 +0,0 @@
|
||||
"""Functools backports from standard lib.
|
||||
|
||||
This file contained the backport of the cached_property implementation of Python 3.12.
|
||||
|
||||
Since we have dropped support for Python 3.11, we can remove this backport.
|
||||
This file is kept for now to avoid breaking custom components that might
|
||||
import it.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
# pylint: disable-next=hass-deprecated-import
|
||||
from functools import cached_property as _cached_property, partial
|
||||
|
||||
from homeassistant.helpers.deprecation import (
|
||||
DeprecatedAlias,
|
||||
all_with_deprecated_constants,
|
||||
check_if_deprecated_constant,
|
||||
dir_with_deprecated_constants,
|
||||
)
|
||||
|
||||
# cached_property deprecated as of 2024.5 use functools.cached_property instead.
|
||||
_DEPRECATED_cached_property = DeprecatedAlias(
|
||||
_cached_property, "functools.cached_property", "2025.5"
|
||||
)
|
||||
|
||||
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
|
||||
__dir__ = partial(
|
||||
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
|
||||
)
|
||||
__all__ = all_with_deprecated_constants(globals())
|
||||

@@ -171,8 +171,6 @@ FRONTEND_INTEGRATIONS = {
# Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
# The substage containing recorder should have no timeout, as it could cancel a database migration.
# Recorder freezes "recorder" timeout during a migration, but it does not freeze other timeouts.
# The substages preceding it should also have no timeout, until we ensure that the recorder
# is not accidentally promoted as a dependency of any of the integrations in them.
# If we add timeouts to the frontend substages, we should make sure they don't apply in recovery mode.
STAGE_0_INTEGRATIONS = (
    # Load logging and http deps as soon as possible

@@ -929,7 +927,11 @@ async def _async_set_up_integrations(
            await _async_setup_multi_components(hass, stage_all_domains, config)
            continue
        try:
            async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME):
            async with hass.timeout.async_timeout(
                timeout,
                cool_down=COOLDOWN_TIME,
                cancel_message=f"Bootstrap stage {name} timeout",
            ):
                await _async_setup_multi_components(hass, stage_all_domains, config)
        except TimeoutError:
            _LOGGER.warning(

@@ -941,7 +943,11 @@ async def _async_set_up_integrations(
    # Wrap up startup
    _LOGGER.debug("Waiting for startup to wrap up")
    try:
        async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
        async with hass.timeout.async_timeout(
            WRAP_UP_TIMEOUT,
            cool_down=COOLDOWN_TIME,
            cancel_message="Bootstrap startup wrap up timeout",
        ):
            await hass.async_block_till_done()
    except TimeoutError:
        _LOGGER.warning(

@@ -14,30 +14,24 @@ from jaraco.abode.exceptions import (
)
from jaraco.abode.helpers.timeline import Groups as GROUPS
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    ATTR_DATE,
    ATTR_DEVICE_ID,
    ATTR_ENTITY_ID,
    ATTR_TIME,
    CONF_PASSWORD,
    CONF_USERNAME,
    EVENT_HOMEASSISTANT_STOP,
    Platform,
)
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, ServiceCall
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.typing import ConfigType

from .const import CONF_POLLING, DOMAIN, LOGGER

SERVICE_SETTINGS = "change_setting"
SERVICE_CAPTURE_IMAGE = "capture_image"
SERVICE_TRIGGER_AUTOMATION = "trigger_automation"
from .services import async_setup_services

ATTR_DEVICE_NAME = "device_name"
ATTR_DEVICE_TYPE = "device_type"

@@ -45,22 +39,12 @@ ATTR_EVENT_CODE = "event_code"
ATTR_EVENT_NAME = "event_name"
ATTR_EVENT_TYPE = "event_type"
ATTR_EVENT_UTC = "event_utc"
ATTR_SETTING = "setting"
ATTR_USER_NAME = "user_name"
ATTR_APP_TYPE = "app_type"
ATTR_EVENT_BY = "event_by"
ATTR_VALUE = "value"

CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)

CHANGE_SETTING_SCHEMA = vol.Schema(
    {vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
)

CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})

AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})

PLATFORMS = [
    Platform.ALARM_CONTROL_PANEL,
    Platform.BINARY_SENSOR,

@@ -85,7 +69,7 @@ class AbodeSystem:

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Abode component."""
    setup_hass_services(hass)
    async_setup_services(hass)
    return True

@@ -138,60 +122,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return unload_ok


def setup_hass_services(hass: HomeAssistant) -> None:
    """Home Assistant services."""

    def change_setting(call: ServiceCall) -> None:
        """Change an Abode system setting."""
        setting = call.data[ATTR_SETTING]
        value = call.data[ATTR_VALUE]

        try:
            hass.data[DOMAIN].abode.set_setting(setting, value)
        except AbodeException as ex:
            LOGGER.warning(ex)

    def capture_image(call: ServiceCall) -> None:
        """Capture a new image."""
        entity_ids = call.data[ATTR_ENTITY_ID]

        target_entities = [
            entity_id
            for entity_id in hass.data[DOMAIN].entity_ids
            if entity_id in entity_ids
        ]

        for entity_id in target_entities:
            signal = f"abode_camera_capture_{entity_id}"
            dispatcher_send(hass, signal)

    def trigger_automation(call: ServiceCall) -> None:
        """Trigger an Abode automation."""
        entity_ids = call.data[ATTR_ENTITY_ID]

        target_entities = [
            entity_id
            for entity_id in hass.data[DOMAIN].entity_ids
            if entity_id in entity_ids
        ]

        for entity_id in target_entities:
            signal = f"abode_trigger_automation_{entity_id}"
            dispatcher_send(hass, signal)

    hass.services.async_register(
        DOMAIN, SERVICE_SETTINGS, change_setting, schema=CHANGE_SETTING_SCHEMA
    )

    hass.services.async_register(
        DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image, schema=CAPTURE_IMAGE_SCHEMA
    )

    hass.services.async_register(
        DOMAIN, SERVICE_TRIGGER_AUTOMATION, trigger_automation, schema=AUTOMATION_SCHEMA
    )


async def setup_hass_events(hass: HomeAssistant) -> None:
    """Home Assistant start and stop callbacks."""

homeassistant/components/abode/services.py (new file, 89 lines)

@@ -0,0 +1,89 @@
"""Support for the Abode Security System."""

from __future__ import annotations

from jaraco.abode.exceptions import Exception as AbodeException
import voluptuous as vol

from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send

from .const import DOMAIN, LOGGER

SERVICE_SETTINGS = "change_setting"
SERVICE_CAPTURE_IMAGE = "capture_image"
SERVICE_TRIGGER_AUTOMATION = "trigger_automation"

ATTR_SETTING = "setting"
ATTR_VALUE = "value"


CHANGE_SETTING_SCHEMA = vol.Schema(
    {vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
)

CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})

AUTOMATION_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})


def _change_setting(call: ServiceCall) -> None:
    """Change an Abode system setting."""
    setting = call.data[ATTR_SETTING]
    value = call.data[ATTR_VALUE]

    try:
        call.hass.data[DOMAIN].abode.set_setting(setting, value)
    except AbodeException as ex:
        LOGGER.warning(ex)


def _capture_image(call: ServiceCall) -> None:
    """Capture a new image."""
    entity_ids = call.data[ATTR_ENTITY_ID]

    target_entities = [
        entity_id
        for entity_id in call.hass.data[DOMAIN].entity_ids
        if entity_id in entity_ids
    ]

    for entity_id in target_entities:
        signal = f"abode_camera_capture_{entity_id}"
        dispatcher_send(call.hass, signal)


def _trigger_automation(call: ServiceCall) -> None:
    """Trigger an Abode automation."""
    entity_ids = call.data[ATTR_ENTITY_ID]

    target_entities = [
        entity_id
        for entity_id in call.hass.data[DOMAIN].entity_ids
        if entity_id in entity_ids
    ]

    for entity_id in target_entities:
        signal = f"abode_trigger_automation_{entity_id}"
        dispatcher_send(call.hass, signal)


def async_setup_services(hass: HomeAssistant) -> None:
    """Home Assistant services."""

    hass.services.async_register(
        DOMAIN, SERVICE_SETTINGS, _change_setting, schema=CHANGE_SETTING_SCHEMA
    )

    hass.services.async_register(
        DOMAIN, SERVICE_CAPTURE_IMAGE, _capture_image, schema=CAPTURE_IMAGE_SCHEMA
    )

    hass.services.async_register(
        DOMAIN,
        SERVICE_TRIGGER_AUTOMATION,
        _trigger_automation,
        schema=AUTOMATION_SCHEMA,
    )
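
For orientation, a minimal usage sketch of the services registered above, written against the standard Home Assistant service-call API; the service name and the "setting"/"value" keys come from this file, while the example setting name and value are assumptions:

    # Hypothetical call of the Abode "change_setting" service defined above.
    # The setting name and value below are placeholders, not values from this change.
    await hass.services.async_call(
        "abode",
        "change_setting",
        {"setting": "beeper_mute", "value": "1"},
        blocking=True,
    )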

@@ -8,7 +8,7 @@ from homeassistant.core import HomeAssistant
from .const import CONNECTION_TYPE, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator

PLATFORMS = [Platform.CLIMATE]
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:

@@ -41,7 +41,30 @@ class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):

    async def _async_update_data(self) -> dict[str, dict[str, Any]]:
        """Fetch data from the Adax."""
        rooms = await self.adax_data_handler.get_rooms() or []
        try:
            if hasattr(self.adax_data_handler, "fetch_rooms_info"):
                rooms = await self.adax_data_handler.fetch_rooms_info() or []
                _LOGGER.debug("fetch_rooms_info returned: %s", rooms)
            else:
                _LOGGER.debug("fetch_rooms_info method not available, using get_rooms")
                rooms = []

            if not rooms:
                _LOGGER.debug(
                    "No rooms from fetch_rooms_info, trying get_rooms as fallback"
                )
                rooms = await self.adax_data_handler.get_rooms() or []
                _LOGGER.debug("get_rooms fallback returned: %s", rooms)

            if not rooms:
                raise UpdateFailed("No rooms available from Adax API")

        except OSError as e:
            raise UpdateFailed(f"Error communicating with API: {e}") from e

        for room in rooms:
            room["energyWh"] = int(room.get("energyWh", 0))

        return {r["id"]: r for r in rooms}

homeassistant/components/adax/sensor.py (new file, 77 lines)

@@ -0,0 +1,77 @@
"""Support for Adax energy sensors."""

from __future__ import annotations

from typing import cast

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AdaxConfigEntry
from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AdaxConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Adax energy sensors with config flow."""
    if entry.data.get(CONNECTION_TYPE) != LOCAL:
        cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)

        # Create individual energy sensors for each device
        async_add_entities(
            AdaxEnergySensor(cloud_coordinator, device_id)
            for device_id in cloud_coordinator.data
        )


class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
    """Representation of an Adax energy sensor."""

    _attr_has_entity_name = True
    _attr_translation_key = "energy"
    _attr_device_class = SensorDeviceClass.ENERGY
    _attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
    _attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
    _attr_state_class = SensorStateClass.TOTAL_INCREASING
    _attr_suggested_display_precision = 3

    def __init__(
        self,
        coordinator: AdaxCloudCoordinator,
        device_id: str,
    ) -> None:
        """Initialize the energy sensor."""
        super().__init__(coordinator)
        self._device_id = device_id
        room = coordinator.data[device_id]

        self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device_id)},
            name=room["name"],
            manufacturer="Adax",
        )

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return (
            super().available and "energyWh" in self.coordinator.data[self._device_id]
        )

    @property
    def native_value(self) -> int:
        """Return the native value of the sensor."""
        return int(self.coordinator.data[self._device_id]["energyWh"])
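
For orientation, a sketch of the per-room mapping that AdaxEnergySensor reads from AdaxCloudCoordinator.data, inferred from the keys used above ("id", "homeId", "name", "energyWh"); the concrete values are illustrative only:

    # Assumed shape of coordinator.data, keyed by room/device id (values are made up).
    coordinator_data = {
        "123456": {
            "id": "123456",
            "homeId": "654321",
            "name": "Living room",
            "energyWh": 15230,  # displayed as kWh via the suggested unit, precision 3
        },
    }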

@@ -15,7 +15,7 @@ from homeassistant.helpers.entity_platform import (
)

from . import AgentDVRConfigEntry
from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN as AGENT_DOMAIN
from .const import ATTRIBUTION, CAMERA_SCAN_INTERVAL_SECS, DOMAIN

SCAN_INTERVAL = timedelta(seconds=CAMERA_SCAN_INTERVAL_SECS)

@@ -82,7 +82,7 @@ class AgentCamera(MjpegCamera):
            still_image_url=f"{device.client._server_url}{device.still_image_url}&size={device.mjpegStreamWidth}x{device.mjpegStreamHeight}",  # noqa: SLF001
        )
        self._attr_device_info = DeviceInfo(
            identifiers={(AGENT_DOMAIN, self.unique_id)},
            identifiers={(DOMAIN, self.unique_id)},
            manufacturer="Agent",
            model="Camera",
            name=f"{device.client.name} {device.name}",

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aioairq"],
  "requirements": ["aioairq==0.4.4"]
  "requirements": ["aioairq==0.4.6"]
}

@@ -5,23 +5,22 @@ from __future__ import annotations
from datetime import timedelta
import logging

from airthings import Airthings, AirthingsDevice, AirthingsError
from airthings import Airthings

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_SECRET, DOMAIN
from .const import CONF_SECRET
from .coordinator import AirthingsDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

PLATFORMS: list[Platform] = [Platform.SENSOR]
SCAN_INTERVAL = timedelta(minutes=6)

type AirthingsDataCoordinatorType = DataUpdateCoordinator[dict[str, AirthingsDevice]]
type AirthingsConfigEntry = ConfigEntry[AirthingsDataCoordinatorType]
type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool:

@@ -32,21 +31,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) ->
        async_get_clientsession(hass),
    )

    async def _update_method() -> dict[str, AirthingsDevice]:
        """Get the latest data from Airthings."""
        try:
            return await airthings.update_devices()  # type: ignore[no-any-return]
        except AirthingsError as err:
            raise UpdateFailed(f"Unable to fetch data: {err}") from err
    coordinator = AirthingsDataUpdateCoordinator(hass, airthings)

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,
        name=DOMAIN,
        update_method=_update_method,
        update_interval=SCAN_INTERVAL,
    )
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator

homeassistant/components/airthings/coordinator.py (new file, 36 lines)

@@ -0,0 +1,36 @@
"""The Airthings integration."""

from datetime import timedelta
import logging

from airthings import Airthings, AirthingsDevice, AirthingsError

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=6)


class AirthingsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AirthingsDevice]]):
    """Coordinator for Airthings data updates."""

    def __init__(self, hass: HomeAssistant, airthings: Airthings) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_method=self._update_method,
            update_interval=SCAN_INTERVAL,
        )
        self.airthings = airthings

    async def _update_method(self) -> dict[str, AirthingsDevice]:
        """Get the latest data from Airthings."""
        try:
            return await self.airthings.update_devices()  # type: ignore[no-any-return]
        except AirthingsError as err:
            raise UpdateFailed(f"Unable to fetch data: {err}") from err

@@ -19,6 +19,7 @@ from homeassistant.const import (
    SIGNAL_STRENGTH_DECIBELS,
    EntityCategory,
    UnitOfPressure,
    UnitOfSoundPressure,
    UnitOfTemperature,
)
from homeassistant.core import HomeAssistant

@@ -27,32 +28,44 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AirthingsConfigEntry, AirthingsDataCoordinatorType
from . import AirthingsConfigEntry
from .const import DOMAIN
from .coordinator import AirthingsDataUpdateCoordinator

SENSORS: dict[str, SensorEntityDescription] = {
    "radonShortTermAvg": SensorEntityDescription(
        key="radonShortTermAvg",
        native_unit_of_measurement="Bq/m³",
        translation_key="radon",
        suggested_display_precision=0,
    ),
    "temp": SensorEntityDescription(
        key="temp",
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    "humidity": SensorEntityDescription(
        key="humidity",
        device_class=SensorDeviceClass.HUMIDITY,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "pressure": SensorEntityDescription(
        key="pressure",
        device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
        native_unit_of_measurement=UnitOfPressure.MBAR,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    "sla": SensorEntityDescription(
        key="sla",
        device_class=SensorDeviceClass.SOUND_PRESSURE,
        native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "battery": SensorEntityDescription(
        key="battery",

@@ -60,40 +73,47 @@ SENSORS: dict[str, SensorEntityDescription] = {
        native_unit_of_measurement=PERCENTAGE,
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "co2": SensorEntityDescription(
        key="co2",
        device_class=SensorDeviceClass.CO2,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "voc": SensorEntityDescription(
        key="voc",
        device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "light": SensorEntityDescription(
        key="light",
        native_unit_of_measurement=PERCENTAGE,
        translation_key="light",
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "lux": SensorEntityDescription(
        key="lux",
        device_class=SensorDeviceClass.ILLUMINANCE,
        native_unit_of_measurement=LIGHT_LUX,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "virusRisk": SensorEntityDescription(
        key="virusRisk",
        translation_key="virus_risk",
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "mold": SensorEntityDescription(
        key="mold",
        translation_key="mold",
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "rssi": SensorEntityDescription(
        key="rssi",

@@ -102,18 +122,21 @@ SENSORS: dict[str, SensorEntityDescription] = {
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "pm1": SensorEntityDescription(
        key="pm1",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        device_class=SensorDeviceClass.PM1,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "pm25": SensorEntityDescription(
        key="pm25",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        device_class=SensorDeviceClass.PM25,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
}

@@ -140,7 +163,7 @@ async def async_setup_entry(


class AirthingsHeaterEnergySensor(
    CoordinatorEntity[AirthingsDataCoordinatorType], SensorEntity
    CoordinatorEntity[AirthingsDataUpdateCoordinator], SensorEntity
):
    """Representation of a Airthings Sensor device."""

@@ -149,7 +172,7 @@ class AirthingsHeaterEnergySensor(

    def __init__(
        self,
        coordinator: AirthingsDataCoordinatorType,
        coordinator: AirthingsDataUpdateCoordinator,
        airthings_device: AirthingsDevice,
        entity_description: SensorEntityDescription,
    ) -> None:
homeassistant/components/airtouch5/manifest.json
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airtouch5",
  "iot_class": "local_push",
  "loggers": ["airtouch5py"],
-  "requirements": ["airtouch5py==0.2.11"]
+  "requirements": ["airtouch5py==0.3.0"]
 }
|
||||
|
||||
homeassistant/components/alexa_devices/manifest.json
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "bronze",
-  "requirements": ["aioamazondevices==3.1.2"]
+  "requirements": ["aioamazondevices==3.0.6"]
 }
|
||||
|
||||
@@ -7,7 +7,6 @@ from dataclasses import dataclass
|
||||
from typing import Any, Final
|
||||
|
||||
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
|
||||
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
|
||||
|
||||
from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -23,7 +22,6 @@ PARALLEL_UPDATES = 1
|
||||
class AmazonNotifyEntityDescription(NotifyEntityDescription):
|
||||
"""Alexa Devices notify entity description."""
|
||||
|
||||
is_supported: Callable[[AmazonDevice], bool] = lambda _device: True
|
||||
method: Callable[[AmazonEchoApi, AmazonDevice, str], Awaitable[None]]
|
||||
subkey: str
|
||||
|
||||
@@ -33,7 +31,6 @@ NOTIFY: Final = (
|
||||
key="speak",
|
||||
translation_key="speak",
|
||||
subkey="AUDIO_PLAYER",
|
||||
is_supported=lambda _device: _device.device_family != SPEAKER_GROUP_FAMILY,
|
||||
method=lambda api, device, message: api.call_alexa_speak(device, message),
|
||||
),
|
||||
AmazonNotifyEntityDescription(
|
||||
@@ -61,7 +58,6 @@ async def async_setup_entry(
|
||||
for sensor_desc in NOTIFY
|
||||
for serial_num in coordinator.data
|
||||
if sensor_desc.subkey in coordinator.data[serial_num].capabilities
|
||||
and sensor_desc.is_supported(coordinator.data[serial_num])
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -16,10 +16,7 @@ from amcrest import AmcrestError, ApiWrapper, LoginError
|
||||
import httpx
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.auth.permissions.const import POLICY_CONTROL
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
CONF_AUTHENTICATION,
|
||||
CONF_BINARY_SENSORS,
|
||||
CONF_HOST,
|
||||
@@ -30,21 +27,17 @@ from homeassistant.const import (
|
||||
CONF_SENSORS,
|
||||
CONF_SWITCHES,
|
||||
CONF_USERNAME,
|
||||
ENTITY_MATCH_ALL,
|
||||
ENTITY_MATCH_NONE,
|
||||
HTTP_BASIC_AUTHENTICATION,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import Unauthorized, UnknownUser
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, discovery
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_send
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.service import async_extract_entity_ids
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .binary_sensor import BINARY_SENSOR_KEYS, BINARY_SENSORS, check_binary_sensors
|
||||
from .camera import CAMERA_SERVICES, STREAM_SOURCE_LIST
|
||||
from .camera import STREAM_SOURCE_LIST
|
||||
from .const import (
|
||||
CAMERAS,
|
||||
COMM_RETRIES,
|
||||
@@ -58,6 +51,7 @@ from .const import (
|
||||
)
|
||||
from .helpers import service_signal
|
||||
from .sensor import SENSOR_KEYS
|
||||
from .services import async_setup_services
|
||||
from .switch import SWITCH_KEYS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -455,47 +449,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
if not hass.data[DATA_AMCREST][DEVICES]:
|
||||
return False
|
||||
|
||||
def have_permission(user: User | None, entity_id: str) -> bool:
|
||||
return not user or user.permissions.check_entity(entity_id, POLICY_CONTROL)
|
||||
|
||||
async def async_extract_from_service(call: ServiceCall) -> list[str]:
|
||||
if call.context.user_id:
|
||||
user = await hass.auth.async_get_user(call.context.user_id)
|
||||
if user is None:
|
||||
raise UnknownUser(context=call.context)
|
||||
else:
|
||||
user = None
|
||||
|
||||
if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_ALL:
|
||||
# Return all entity_ids user has permission to control.
|
||||
return [
|
||||
entity_id
|
||||
for entity_id in hass.data[DATA_AMCREST][CAMERAS]
|
||||
if have_permission(user, entity_id)
|
||||
]
|
||||
|
||||
if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE:
|
||||
return []
|
||||
|
||||
call_ids = await async_extract_entity_ids(hass, call)
|
||||
entity_ids = []
|
||||
for entity_id in hass.data[DATA_AMCREST][CAMERAS]:
|
||||
if entity_id not in call_ids:
|
||||
continue
|
||||
if not have_permission(user, entity_id):
|
||||
raise Unauthorized(
|
||||
context=call.context, entity_id=entity_id, permission=POLICY_CONTROL
|
||||
)
|
||||
entity_ids.append(entity_id)
|
||||
return entity_ids
|
||||
|
||||
async def async_service_handler(call: ServiceCall) -> None:
|
||||
args = [call.data[arg] for arg in CAMERA_SERVICES[call.service][2]]
|
||||
for entity_id in await async_extract_from_service(call):
|
||||
async_dispatcher_send(hass, service_signal(call.service, entity_id), *args)
|
||||
|
||||
for service, params in CAMERA_SERVICES.items():
|
||||
hass.services.async_register(DOMAIN, service, async_service_handler, params[0])
|
||||
async_setup_services(hass)
|
||||
|
||||
return True

homeassistant/components/amcrest/services.py (new file, 61 lines)
@@ -0,0 +1,61 @@
"""Support for Amcrest IP cameras."""

from __future__ import annotations

from homeassistant.auth.models import User
from homeassistant.auth.permissions.const import POLICY_CONTROL
from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, ENTITY_MATCH_NONE
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import Unauthorized, UnknownUser
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.service import async_extract_entity_ids

from .camera import CAMERA_SERVICES
from .const import CAMERAS, DATA_AMCREST, DOMAIN
from .helpers import service_signal


def async_setup_services(hass: HomeAssistant) -> None:
    """Set up the Amcrest IP Camera services."""

    def have_permission(user: User | None, entity_id: str) -> bool:
        return not user or user.permissions.check_entity(entity_id, POLICY_CONTROL)

    async def async_extract_from_service(call: ServiceCall) -> list[str]:
        if call.context.user_id:
            user = await hass.auth.async_get_user(call.context.user_id)
            if user is None:
                raise UnknownUser(context=call.context)
        else:
            user = None

        if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_ALL:
            # Return all entity_ids user has permission to control.
            return [
                entity_id
                for entity_id in hass.data[DATA_AMCREST][CAMERAS]
                if have_permission(user, entity_id)
            ]

        if call.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_NONE:
            return []

        call_ids = await async_extract_entity_ids(hass, call)
        entity_ids = []
        for entity_id in hass.data[DATA_AMCREST][CAMERAS]:
            if entity_id not in call_ids:
                continue
            if not have_permission(user, entity_id):
                raise Unauthorized(
                    context=call.context, entity_id=entity_id, permission=POLICY_CONTROL
                )
            entity_ids.append(entity_id)
        return entity_ids

    async def async_service_handler(call: ServiceCall) -> None:
        args = [call.data[arg] for arg in CAMERA_SERVICES[call.service][2]]
        for entity_id in await async_extract_from_service(call):
            async_dispatcher_send(hass, service_signal(call.service, entity_id), *args)

    for service, params in CAMERA_SERVICES.items():
        hass.services.async_register(DOMAIN, service, async_service_handler, params[0])
|
||||
@@ -24,7 +24,7 @@ from homeassistant.components.recorder import (
|
||||
get_instance as get_recorder_instance,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IGNORE
|
||||
from homeassistant.const import ATTR_DOMAIN, __version__ as HA_VERSION
|
||||
from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
@@ -225,7 +225,8 @@ class Analytics:
|
||||
LOGGER.error(err)
|
||||
return
|
||||
|
||||
configuration_set = set(yaml_configuration)
|
||||
configuration_set = _domains_from_yaml_config(yaml_configuration)
|
||||
|
||||
er_platforms = {
|
||||
entity.platform
|
||||
for entity in ent_reg.entities.values()
|
||||
@@ -370,3 +371,13 @@ class Analytics:
|
||||
for entry in entries
|
||||
if entry.source != SOURCE_IGNORE and entry.disabled_by is None
|
||||
)
|
||||
|
||||
|
||||
def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
|
||||
"""Extract domains from the YAML configuration."""
|
||||
domains = set(yaml_configuration)
|
||||
for platforms in conf_util.extract_platform_integrations(
|
||||
yaml_configuration, BASE_PLATFORMS
|
||||
).values():
|
||||
domains.update(platforms)
|
||||
return domains
|
||||
|
||||
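A worked example of the analytics change above, with illustrative values only: previously just the top-level YAML keys were counted, whereas _domains_from_yaml_config also pulls in integrations referenced through platform: entries via extract_platform_integrations.

# Illustrative input/output only; not taken from the integration's tests.
yaml_configuration = {
    "homeassistant": {},
    "sensor": [{"platform": "command_line", "command": "echo 1"}],
}

# Old behaviour: set(yaml_configuration) == {"homeassistant", "sensor"}
# New behaviour: _domains_from_yaml_config(yaml_configuration) also scans the
# sensor platform entries and returns {"homeassistant", "sensor", "command_line"}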
@@ -89,7 +89,7 @@ class ArubaDeviceScanner(DeviceScanner):
|
||||
def get_aruba_data(self) -> dict[str, dict[str, str]] | None:
|
||||
"""Retrieve data from Aruba Access Point and return parsed result."""
|
||||
|
||||
connect = f"ssh {self.username}@{self.host}"
|
||||
connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
|
||||
ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
|
||||
query = ssh.expect(
|
||||
[
|
||||
|
||||
@@ -1207,6 +1207,15 @@ class PipelineRun:
|
||||
|
||||
self._streamed_response_text = True
|
||||
|
||||
self.process_event(
|
||||
PipelineEvent(
|
||||
PipelineEventType.INTENT_PROGRESS,
|
||||
{
|
||||
"tts_start_streaming": True,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
async def tts_input_stream_generator() -> AsyncGenerator[str]:
|
||||
"""Yield TTS input stream."""
|
||||
while (tts_input := await tts_input_stream.get()) is not None:
|
||||
|
||||
@@ -6,6 +6,7 @@ from homeassistant.components.water_heater import (
|
||||
STATE_ECO,
|
||||
STATE_PERFORMANCE,
|
||||
WaterHeaterEntity,
|
||||
WaterHeaterEntityFeature,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF, Platform, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -32,6 +33,7 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
|
||||
"""Representation of an ATAG water heater."""
|
||||
|
||||
_attr_operation_list = OPERATION_LIST
|
||||
_attr_supported_features = WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
@property
|
||||
|
||||
@@ -11,7 +11,7 @@ from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
from ..const import ATTR_MANUFACTURER, DOMAIN as AXIS_DOMAIN
|
||||
from ..const import ATTR_MANUFACTURER, DOMAIN
|
||||
from .config import AxisConfig
|
||||
from .entity_loader import AxisEntityLoader
|
||||
from .event_source import AxisEventSource
|
||||
@@ -79,7 +79,7 @@ class AxisHub:
|
||||
config_entry_id=self.config.entry.entry_id,
|
||||
configuration_url=self.api.config.url,
|
||||
connections={(CONNECTION_NETWORK_MAC, self.unique_id)},
|
||||
identifiers={(AXIS_DOMAIN, self.unique_id)},
|
||||
identifiers={(DOMAIN, self.unique_id)},
|
||||
manufacturer=ATTR_MANUFACTURER,
|
||||
model=f"{self.config.model} {self.product_type}",
|
||||
name=self.config.name,
|
||||
|
||||
@@ -25,7 +25,7 @@ from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, PLATFORMS
|
||||
from .coordinator import BlinkConfigEntry, BlinkUpdateCoordinator
|
||||
from .services import setup_services
|
||||
from .services import async_setup_services
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -72,7 +72,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> b
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Blink."""
|
||||
|
||||
setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_PIN
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
@@ -21,34 +21,36 @@ SERVICE_SEND_PIN_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Blink integration."""
|
||||
|
||||
async def send_pin(call: ServiceCall):
|
||||
"""Call blink to send new pin."""
|
||||
config_entry: BlinkConfigEntry | None
|
||||
for entry_id in call.data[ATTR_CONFIG_ENTRY_ID]:
|
||||
if not (config_entry := hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
if config_entry.state != ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": config_entry.title},
|
||||
)
|
||||
coordinator = config_entry.runtime_data
|
||||
await coordinator.api.auth.send_auth_key(
|
||||
coordinator.api,
|
||||
call.data[CONF_PIN],
|
||||
async def _send_pin(call: ServiceCall) -> None:
|
||||
"""Call blink to send new pin."""
|
||||
config_entry: BlinkConfigEntry | None
|
||||
for entry_id in call.data[ATTR_CONFIG_ENTRY_ID]:
|
||||
if not (config_entry := call.hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="integration_not_found",
|
||||
translation_placeholders={"target": DOMAIN},
|
||||
)
|
||||
if config_entry.state != ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="not_loaded",
|
||||
translation_placeholders={"target": config_entry.title},
|
||||
)
|
||||
coordinator = config_entry.runtime_data
|
||||
await coordinator.api.auth.send_auth_key(
|
||||
coordinator.api,
|
||||
call.data[CONF_PIN],
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Blink integration."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SEND_PIN,
|
||||
send_pin,
|
||||
_send_pin,
|
||||
schema=SERVICE_SEND_PIN_SCHEMA,
|
||||
)
|
||||
|
||||
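A hedged example of invoking the refactored Blink send_pin service shown above. The config entry id is a placeholder, and the data keys assume ATTR_CONFIG_ENTRY_ID resolves to "config_entry_id" and CONF_PIN to "pin".

from homeassistant.core import HomeAssistant


async def resend_blink_pin(hass: HomeAssistant) -> None:
    """Trigger the Blink send_pin service (illustrative)."""
    await hass.services.async_call(
        "blink",
        "send_pin",
        {
            "config_entry_id": ["0123456789abcdef0123456789abcdef"],  # placeholder
            "pin": "1234",
        },
        blocking=True,
    )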
homeassistant/components/bsblan/manifest.json
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["bsblan"],
-  "requirements": ["python-bsblan==1.2.1"]
+  "requirements": ["python-bsblan==2.1.0"]
 }
|
||||
|
||||
homeassistant/components/camera/manifest.json
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/camera",
  "integration_type": "entity",
  "quality_scale": "internal",
-  "requirements": ["PyTurboJPEG==1.7.5"]
+  "requirements": ["PyTurboJPEG==1.8.0"]
 }
|
||||
|
||||
@@ -3,7 +3,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
import socket
|
||||
|
||||
@@ -26,8 +27,18 @@ from .const import CONF_RECORDS, DEFAULT_UPDATE_INTERVAL, DOMAIN, SERVICE_UPDATE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type CloudflareConfigEntry = ConfigEntry[CloudflareRuntimeData]
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
@dataclass
|
||||
class CloudflareRuntimeData:
|
||||
"""Runtime data for Cloudflare config entry."""
|
||||
|
||||
client: pycfdns.Client
|
||||
dns_zone: pycfdns.ZoneModel
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: CloudflareConfigEntry) -> bool:
|
||||
"""Set up Cloudflare from a config entry."""
|
||||
session = async_get_clientsession(hass)
|
||||
client = pycfdns.Client(
|
||||
@@ -45,12 +56,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
except pycfdns.ComunicationException as error:
|
||||
raise ConfigEntryNotReady from error
|
||||
|
||||
async def update_records(now):
|
||||
entry.runtime_data = CloudflareRuntimeData(client, dns_zone)
|
||||
|
||||
async def update_records(now: datetime) -> None:
|
||||
"""Set up recurring update."""
|
||||
try:
|
||||
await _async_update_cloudflare(
|
||||
hass, client, dns_zone, entry.data[CONF_RECORDS]
|
||||
)
|
||||
await _async_update_cloudflare(hass, entry)
|
||||
except (
|
||||
pycfdns.AuthenticationException,
|
||||
pycfdns.ComunicationException,
|
||||
@@ -60,9 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def update_records_service(call: ServiceCall) -> None:
|
||||
"""Set up service for manual trigger."""
|
||||
try:
|
||||
await _async_update_cloudflare(
|
||||
hass, client, dns_zone, entry.data[CONF_RECORDS]
|
||||
)
|
||||
await _async_update_cloudflare(hass, entry)
|
||||
except (
|
||||
pycfdns.AuthenticationException,
|
||||
pycfdns.ComunicationException,
|
||||
@@ -79,7 +88,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: CloudflareConfigEntry) -> bool:
|
||||
"""Unload Cloudflare config entry."""
|
||||
|
||||
return True
|
||||
@@ -87,10 +96,12 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
async def _async_update_cloudflare(
|
||||
hass: HomeAssistant,
|
||||
client: pycfdns.Client,
|
||||
dns_zone: pycfdns.ZoneModel,
|
||||
target_records: list[str],
|
||||
entry: CloudflareConfigEntry,
|
||||
) -> None:
|
||||
client = entry.runtime_data.client
|
||||
dns_zone = entry.runtime_data.dns_zone
|
||||
target_records: list[str] = entry.data[CONF_RECORDS]
|
||||
|
||||
_LOGGER.debug("Starting update for zone %s", dns_zone["name"])
|
||||
|
||||
records = await client.list_dns_records(zone_id=dns_zone["id"], type="A")
|
||||
|
||||
@@ -9,12 +9,11 @@ from typing import Any
|
||||
from homeassistant.components.notify import BaseNotificationService
|
||||
from homeassistant.const import CONF_COMMAND
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers.template import Template
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util.process import kill_subprocess
|
||||
|
||||
from .const import CONF_COMMAND_TIMEOUT, LOGGER
|
||||
from .utils import render_template_args
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -45,28 +44,10 @@ class CommandLineNotificationService(BaseNotificationService):
|
||||
|
||||
def send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message to a command line."""
|
||||
command = self.command
|
||||
if " " not in command:
|
||||
prog = command
|
||||
args = None
|
||||
args_compiled = None
|
||||
else:
|
||||
prog, args = command.split(" ", 1)
|
||||
args_compiled = Template(args, self.hass)
|
||||
if not (command := render_template_args(self.hass, self.command)):
|
||||
return
|
||||
|
||||
rendered_args = None
|
||||
if args_compiled:
|
||||
args_to_render = {"arguments": args}
|
||||
try:
|
||||
rendered_args = args_compiled.async_render(args_to_render)
|
||||
except TemplateError as ex:
|
||||
LOGGER.exception("Error rendering command template: %s", ex)
|
||||
return
|
||||
|
||||
if rendered_args != args:
|
||||
command = f"{prog} {rendered_args}"
|
||||
|
||||
LOGGER.debug("Running command: %s, with message: %s", command, message)
|
||||
LOGGER.debug("Running with message: %s", message)
|
||||
|
||||
with subprocess.Popen( # noqa: S602 # shell by design
|
||||
command,
|
||||
|
||||
@@ -19,7 +19,6 @@ from homeassistant.const import (
|
||||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.template import Template
|
||||
@@ -37,7 +36,7 @@ from .const import (
|
||||
LOGGER,
|
||||
TRIGGER_ENTITY_OPTIONS,
|
||||
)
|
||||
from .utils import async_check_output_or_log
|
||||
from .utils import async_check_output_or_log, render_template_args
|
||||
|
||||
DEFAULT_NAME = "Command Sensor"
|
||||
|
||||
@@ -222,32 +221,6 @@ class CommandSensorData:
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Get the latest data with a shell command."""
|
||||
command = self.command
|
||||
|
||||
if " " not in command:
|
||||
prog = command
|
||||
args = None
|
||||
args_compiled = None
|
||||
else:
|
||||
prog, args = command.split(" ", 1)
|
||||
args_compiled = Template(args, self.hass)
|
||||
|
||||
if args_compiled:
|
||||
try:
|
||||
args_to_render = {"arguments": args}
|
||||
rendered_args = args_compiled.async_render(args_to_render)
|
||||
except TemplateError as ex:
|
||||
LOGGER.exception("Error rendering command template: %s", ex)
|
||||
return
|
||||
else:
|
||||
rendered_args = None
|
||||
|
||||
if rendered_args == args:
|
||||
# No template used. default behavior
|
||||
pass
|
||||
else:
|
||||
# Template used. Construct the string used in the shell
|
||||
command = f"{prog} {rendered_args}"
|
||||
|
||||
LOGGER.debug("Running command: %s", command)
|
||||
if not (command := render_template_args(self.hass, self.command)):
|
||||
return
|
||||
self.value = await async_check_output_or_log(command, self.timeout)
|
||||
|
||||
@@ -3,9 +3,13 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers.template import Template
|
||||
|
||||
from .const import LOGGER
|
||||
|
||||
_EXEC_FAILED_CODE = 127
|
||||
|
||||
|
||||
@@ -18,7 +22,7 @@ async def async_call_shell_with_timeout(
|
||||
return code is returned.
|
||||
"""
|
||||
try:
|
||||
_LOGGER.debug("Running command: %s", command)
|
||||
LOGGER.debug("Running command: %s", command)
|
||||
proc = await asyncio.create_subprocess_shell( # shell by design
|
||||
command,
|
||||
close_fds=False, # required for posix_spawn
|
||||
@@ -26,14 +30,14 @@ async def async_call_shell_with_timeout(
|
||||
async with asyncio.timeout(timeout):
|
||||
await proc.communicate()
|
||||
except TimeoutError:
|
||||
_LOGGER.error("Timeout for command: %s", command)
|
||||
LOGGER.error("Timeout for command: %s", command)
|
||||
return -1
|
||||
|
||||
return_code = proc.returncode
|
||||
if return_code == _EXEC_FAILED_CODE:
|
||||
_LOGGER.error("Error trying to exec command: %s", command)
|
||||
LOGGER.error("Error trying to exec command: %s", command)
|
||||
elif log_return_code and return_code != 0:
|
||||
_LOGGER.error(
|
||||
LOGGER.error(
|
||||
"Command failed (with return code %s): %s",
|
||||
proc.returncode,
|
||||
command,
|
||||
@@ -53,12 +57,39 @@ async def async_check_output_or_log(command: str, timeout: int) -> str | None:
|
||||
stdout, _ = await proc.communicate()
|
||||
|
||||
if proc.returncode != 0:
|
||||
_LOGGER.error(
|
||||
LOGGER.error(
|
||||
"Command failed (with return code %s): %s", proc.returncode, command
|
||||
)
|
||||
else:
|
||||
return stdout.strip().decode("utf-8")
|
||||
except TimeoutError:
|
||||
_LOGGER.error("Timeout for command: %s", command)
|
||||
LOGGER.error("Timeout for command: %s", command)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def render_template_args(hass: HomeAssistant, command: str) -> str | None:
|
||||
"""Render template arguments for command line utilities."""
|
||||
if " " not in command:
|
||||
prog = command
|
||||
args = None
|
||||
args_compiled = None
|
||||
else:
|
||||
prog, args = command.split(" ", 1)
|
||||
args_compiled = Template(args, hass)
|
||||
|
||||
rendered_args = None
|
||||
if args_compiled:
|
||||
args_to_render = {"arguments": args}
|
||||
try:
|
||||
rendered_args = args_compiled.async_render(args_to_render)
|
||||
except TemplateError as ex:
|
||||
LOGGER.exception("Error rendering command template: %s", ex)
|
||||
return None
|
||||
|
||||
if rendered_args != args:
|
||||
command = f"{prog} {rendered_args}"
|
||||
|
||||
LOGGER.debug("Running command: %s", command)
|
||||
|
||||
return command
|
||||
|
||||
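A minimal usage sketch for the shared render_template_args helper introduced above in command_line/utils.py; the sensor entity id is a placeholder.

from homeassistant.components.command_line.utils import render_template_args
from homeassistant.core import HomeAssistant


def build_command(hass: HomeAssistant) -> str | None:
    """Return the shell command with its template arguments rendered."""
    # Everything after the first space is compiled as a template and rendered
    # with {"arguments": <raw args>}; on a TemplateError the helper logs the
    # error and returns None so callers can bail out early.
    return render_template_args(
        hass, 'echo {{ states("sensor.outside_temperature") }}'
    )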
homeassistant/components/compensation/manifest.json
@@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/compensation",
  "iot_class": "calculated",
  "quality_scale": "legacy",
-  "requirements": ["numpy==2.2.2"]
+  "requirements": ["numpy==2.3.0"]
 }
|
||||
|
||||
@@ -9,7 +9,7 @@ from homeassistant.const import ATTR_DEVICE_ID, CONF_EVENT, CONF_ID
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import CONF_GESTURE, DOMAIN as DECONZ_DOMAIN
|
||||
from .const import CONF_GESTURE, DOMAIN
|
||||
from .deconz_event import CONF_DECONZ_ALARM_EVENT, CONF_DECONZ_EVENT
|
||||
from .device_trigger import (
|
||||
CONF_BOTH_BUTTONS,
|
||||
@@ -200,6 +200,6 @@ def async_describe_events(
|
||||
}
|
||||
|
||||
async_describe_event(
|
||||
DECONZ_DOMAIN, CONF_DECONZ_ALARM_EVENT, async_describe_deconz_alarm_event
|
||||
DOMAIN, CONF_DECONZ_ALARM_EVENT, async_describe_deconz_alarm_event
|
||||
)
|
||||
async_describe_event(DECONZ_DOMAIN, CONF_DECONZ_EVENT, async_describe_deconz_event)
|
||||
async_describe_event(DOMAIN, CONF_DECONZ_EVENT, async_describe_deconz_event)
|
||||
|
||||
homeassistant/components/dnsip/manifest.json
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/dnsip",
  "iot_class": "cloud_polling",
-  "requirements": ["aiodns==3.5.0"]
+  "requirements": ["aiodns==3.4.0"]
 }
|
||||
|
||||
@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
from datetime import timedelta
|
||||
from ipaddress import IPv4Address, IPv6Address
|
||||
import logging
|
||||
from typing import Literal
|
||||
|
||||
import aiodns
|
||||
from aiodns.error import DNSError
|
||||
@@ -34,7 +35,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
SCAN_INTERVAL = timedelta(seconds=120)
|
||||
|
||||
|
||||
def sort_ips(ips: list, querytype: str) -> list:
|
||||
def sort_ips(ips: list, querytype: Literal["A", "AAAA"]) -> list:
|
||||
"""Join IPs into a single string."""
|
||||
|
||||
if querytype == "AAAA":
|
||||
@@ -89,7 +90,7 @@ class WanIpSensor(SensorEntity):
|
||||
self.hostname = hostname
|
||||
self.resolver = aiodns.DNSResolver(tcp_port=port, udp_port=port)
|
||||
self.resolver.nameservers = [resolver]
|
||||
self.querytype = "AAAA" if ipv6 else "A"
|
||||
self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
|
||||
self._retries = DEFAULT_RETRIES
|
||||
self._attr_extra_state_attributes = {
|
||||
"resolver": resolver,
|
||||
@@ -106,7 +107,7 @@ class WanIpSensor(SensorEntity):
|
||||
async def async_update(self) -> None:
|
||||
"""Get the current DNS IP address for hostname."""
|
||||
try:
|
||||
response = await self.resolver.query(self.hostname, self.querytype) # type: ignore[call-overload]
|
||||
response = await self.resolver.query(self.hostname, self.querytype)
|
||||
except DNSError as err:
|
||||
_LOGGER.warning("Exception while resolving host: %s", err)
|
||||
response = None
|
||||
|
||||
@@ -8,7 +8,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import _LOGGER, CONF_DOWNLOAD_DIR
|
||||
from .services import register_services
|
||||
from .services import async_setup_services
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
@@ -25,6 +25,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
return False
|
||||
|
||||
register_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
@@ -141,7 +141,7 @@ def download_file(service: ServiceCall) -> None:
|
||||
threading.Thread(target=do_download).start()
|
||||
|
||||
|
||||
def register_services(hass: HomeAssistant) -> None:
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register the services for the downloader component."""
|
||||
async_register_admin_service(
|
||||
hass,
|
||||
|
||||
@@ -8,7 +8,7 @@ import re
|
||||
from typing import Any
|
||||
|
||||
from elkm1_lib.elements import Element
|
||||
from elkm1_lib.elk import Elk, Panel
|
||||
from elkm1_lib.elk import Elk
|
||||
from elkm1_lib.util import parse_url
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -26,12 +26,11 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.network import is_ip_address
|
||||
|
||||
from .const import (
|
||||
@@ -62,6 +61,7 @@ from .discovery import (
|
||||
async_update_entry_from_discovery,
|
||||
)
|
||||
from .models import ELKM1Data
|
||||
from .services import async_setup_services
|
||||
|
||||
type ElkM1ConfigEntry = ConfigEntry[ELKM1Data]
|
||||
|
||||
@@ -79,19 +79,6 @@ PLATFORMS = [
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
SPEAK_SERVICE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("number"): vol.All(vol.Coerce(int), vol.Range(min=0, max=999)),
|
||||
vol.Optional("prefix", default=""): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
SET_TIME_SERVICE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional("prefix", default=""): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def hostname_from_url(url: str) -> str:
|
||||
"""Return the hostname from a url."""
|
||||
@@ -179,7 +166,7 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
|
||||
async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool:
|
||||
"""Set up the Elk M1 platform."""
|
||||
_create_elk_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
async def _async_discovery(*_: Any) -> None:
|
||||
async_trigger_discovery(
|
||||
@@ -326,17 +313,6 @@ def _included(ranges: list[tuple[int, int]], set_to: bool, values: list[bool]) -
|
||||
values[rng[0] - 1 : rng[1]] = [set_to] * (rng[1] - rng[0] + 1)
|
||||
|
||||
|
||||
def _find_elk_by_prefix(hass: HomeAssistant, prefix: str) -> Elk | None:
|
||||
"""Search all config entries for a given prefix."""
|
||||
for entry in hass.config_entries.async_entries(DOMAIN):
|
||||
if not entry.runtime_data:
|
||||
continue
|
||||
elk_data: ELKM1Data = entry.runtime_data
|
||||
if elk_data.prefix == prefix:
|
||||
return elk_data.elk
|
||||
return None
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
@@ -390,39 +366,3 @@ async def async_wait_for_elk_to_sync(
|
||||
_LOGGER.debug("Received %s event", name)
|
||||
|
||||
return success
|
||||
|
||||
|
||||
@callback
|
||||
def _async_get_elk_panel(hass: HomeAssistant, service: ServiceCall) -> Panel:
|
||||
"""Get the ElkM1 panel from a service call."""
|
||||
prefix = service.data["prefix"]
|
||||
elk = _find_elk_by_prefix(hass, prefix)
|
||||
if elk is None:
|
||||
raise HomeAssistantError(f"No ElkM1 with prefix '{prefix}' found")
|
||||
return elk.panel
|
||||
|
||||
|
||||
def _create_elk_services(hass: HomeAssistant) -> None:
|
||||
"""Create ElkM1 services."""
|
||||
|
||||
@callback
|
||||
def _speak_word_service(service: ServiceCall) -> None:
|
||||
_async_get_elk_panel(hass, service).speak_word(service.data["number"])
|
||||
|
||||
@callback
|
||||
def _speak_phrase_service(service: ServiceCall) -> None:
|
||||
_async_get_elk_panel(hass, service).speak_phrase(service.data["number"])
|
||||
|
||||
@callback
|
||||
def _set_time_service(service: ServiceCall) -> None:
|
||||
_async_get_elk_panel(hass, service).set_time(dt_util.now())
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, "speak_word", _speak_word_service, SPEAK_SERVICE_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, "speak_phrase", _speak_phrase_service, SPEAK_SERVICE_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, "set_time", _set_time_service, SET_TIME_SERVICE_SCHEMA
|
||||
)

homeassistant/components/elkm1/services.py (new file, 77 lines)
@@ -0,0 +1,77 @@
"""Support the ElkM1 Gold and ElkM1 EZ8 alarm/integration panels."""

from __future__ import annotations

from elkm1_lib.elk import Elk, Panel
import voluptuous as vol

from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.util import dt as dt_util

from .const import DOMAIN
from .models import ELKM1Data

SPEAK_SERVICE_SCHEMA = vol.Schema(
    {
        vol.Required("number"): vol.All(vol.Coerce(int), vol.Range(min=0, max=999)),
        vol.Optional("prefix", default=""): cv.string,
    }
)

SET_TIME_SERVICE_SCHEMA = vol.Schema(
    {
        vol.Optional("prefix", default=""): cv.string,
    }
)


def _find_elk_by_prefix(hass: HomeAssistant, prefix: str) -> Elk | None:
    """Search all config entries for a given prefix."""
    for entry in hass.config_entries.async_entries(DOMAIN):
        if not entry.runtime_data:
            continue
        elk_data: ELKM1Data = entry.runtime_data
        if elk_data.prefix == prefix:
            return elk_data.elk
    return None


@callback
def _async_get_elk_panel(service: ServiceCall) -> Panel:
    """Get the ElkM1 panel from a service call."""
    prefix = service.data["prefix"]
    elk = _find_elk_by_prefix(service.hass, prefix)
    if elk is None:
        raise HomeAssistantError(f"No ElkM1 with prefix '{prefix}' found")
    return elk.panel


@callback
def _speak_word_service(service: ServiceCall) -> None:
    _async_get_elk_panel(service).speak_word(service.data["number"])


@callback
def _speak_phrase_service(service: ServiceCall) -> None:
    _async_get_elk_panel(service).speak_phrase(service.data["number"])


@callback
def _set_time_service(service: ServiceCall) -> None:
    _async_get_elk_panel(service).set_time(dt_util.now())


def async_setup_services(hass: HomeAssistant) -> None:
    """Create ElkM1 services."""

    hass.services.async_register(
        DOMAIN, "speak_word", _speak_word_service, SPEAK_SERVICE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, "speak_phrase", _speak_phrase_service, SPEAK_SERVICE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, "set_time", _set_time_service, SET_TIME_SERVICE_SCHEMA
    )
|
||||
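A short illustrative caller for the relocated ElkM1 services; the word number is arbitrary and prefix "" targets the default panel, matching SPEAK_SERVICE_SCHEMA above.

from homeassistant.core import HomeAssistant


async def announce_and_sync(hass: HomeAssistant) -> None:
    """Exercise the ElkM1 speak_word and set_time services (illustrative)."""
    await hass.services.async_call(
        "elkm1", "speak_word", {"number": 142, "prefix": ""}, blocking=True
    )
    await hass.services.async_call(
        "elkm1", "set_time", {"prefix": ""}, blocking=True
    )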
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
from pyenphase import Envoy
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -10,14 +9,9 @@ from homeassistant.const import CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
OPTION_DISABLE_KEEP_ALIVE,
|
||||
OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE,
|
||||
PLATFORMS,
|
||||
)
|
||||
from .const import DOMAIN, PLATFORMS
|
||||
from .coordinator import EnphaseConfigEntry, EnphaseUpdateCoordinator
|
||||
|
||||
|
||||
@@ -25,19 +19,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> b
|
||||
"""Set up Enphase Envoy from a config entry."""
|
||||
|
||||
host = entry.data[CONF_HOST]
|
||||
options = entry.options
|
||||
envoy = (
|
||||
Envoy(
|
||||
host,
|
||||
httpx.AsyncClient(
|
||||
verify=False, limits=httpx.Limits(max_keepalive_connections=0)
|
||||
),
|
||||
)
|
||||
if options.get(
|
||||
OPTION_DISABLE_KEEP_ALIVE, OPTION_DISABLE_KEEP_ALIVE_DEFAULT_VALUE
|
||||
)
|
||||
else Envoy(host, get_async_client(hass, verify_ssl=False))
|
||||
)
|
||||
session = async_create_clientsession(hass, verify_ssl=False)
|
||||
envoy = Envoy(host, session)
|
||||
coordinator = EnphaseUpdateCoordinator(hass, envoy, entry)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
@@ -24,7 +24,7 @@ from homeassistant.const import (
|
||||
CONF_USERNAME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
from homeassistant.helpers.typing import VolDictType
|
||||
|
||||
@@ -63,7 +63,7 @@ async def validate_input(
|
||||
description_placeholders: dict[str, str],
|
||||
) -> Envoy:
|
||||
"""Validate the user input allows us to connect."""
|
||||
envoy = Envoy(host, get_async_client(hass, verify_ssl=False))
|
||||
envoy = Envoy(host, async_get_clientsession(hass, verify_ssl=False))
|
||||
try:
|
||||
await envoy.setup()
|
||||
await envoy.authenticate(username=username, password=password)
|
||||
|
||||
@@ -6,6 +6,7 @@ import copy
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from aiohttp import ClientResponse
|
||||
from attr import asdict
|
||||
from pyenphase.envoy import Envoy
|
||||
from pyenphase.exceptions import EnvoyError
|
||||
@@ -69,14 +70,14 @@ async def _get_fixture_collection(envoy: Envoy, serial: str) -> dict[str, Any]:
|
||||
|
||||
for end_point in end_points:
|
||||
try:
|
||||
response = await envoy.request(end_point)
|
||||
fixture_data[end_point] = response.text.replace("\n", "").replace(
|
||||
serial, CLEAN_TEXT
|
||||
response: ClientResponse = await envoy.request(end_point)
|
||||
fixture_data[end_point] = (
|
||||
(await response.text()).replace("\n", "").replace(serial, CLEAN_TEXT)
|
||||
)
|
||||
fixture_data[f"{end_point}_log"] = json_dumps(
|
||||
{
|
||||
"headers": dict(response.headers.items()),
|
||||
"code": response.status_code,
|
||||
"code": response.status,
|
||||
}
|
||||
)
|
||||
except EnvoyError as err:
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from collections.abc import Callable, Coroutine
|
||||
from typing import Any, Concatenate
|
||||
|
||||
from httpx import HTTPError
|
||||
from aiohttp import ClientError
|
||||
from pyenphase import EnvoyData
|
||||
from pyenphase.exceptions import EnvoyError
|
||||
|
||||
@@ -16,7 +16,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from .const import DOMAIN
|
||||
from .coordinator import EnphaseUpdateCoordinator
|
||||
|
||||
ACTIONERRORS = (EnvoyError, HTTPError)
|
||||
ACTIONERRORS = (EnvoyError, ClientError)
|
||||
|
||||
|
||||
class EnvoyBaseEntity(CoordinatorEntity[EnphaseUpdateCoordinator]):
|
||||
|
||||
homeassistant/components/enphase_envoy/manifest.json
@@ -7,7 +7,7 @@
  "iot_class": "local_polling",
  "loggers": ["pyenphase"],
  "quality_scale": "platinum",
-  "requirements": ["pyenphase==1.26.1"],
+  "requirements": ["pyenphase==2.0.1"],
  "zeroconf": [
    {
      "type": "_enphase-envoy._tcp.local."
|
||||
|
||||
@@ -71,6 +71,11 @@ class EvoDHW(EvoChild, WaterHeaterEntity):
|
||||
_attr_name = "DHW controller"
|
||||
_attr_icon = "mdi:thermometer-lines"
|
||||
_attr_operation_list = list(HA_STATE_TO_EVO)
|
||||
_attr_supported_features = (
|
||||
WaterHeaterEntityFeature.AWAY_MODE
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE
|
||||
)
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
_evo_device: evo.HotWater
|
||||
@@ -91,9 +96,6 @@ class EvoDHW(EvoChild, WaterHeaterEntity):
|
||||
self._attr_precision = (
|
||||
PRECISION_TENTHS if coordinator.client_v1 else PRECISION_WHOLE
|
||||
)
|
||||
self._attr_supported_features = (
|
||||
WaterHeaterEntityFeature.AWAY_MODE | WaterHeaterEntityFeature.OPERATION_MODE
|
||||
)
|
||||
|
||||
@property
|
||||
def current_operation(self) -> str | None:
|
||||
|
||||
@@ -11,32 +11,25 @@ from propcache.api import cached_property
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
CONTENT_TYPE_MULTIPART,
|
||||
EVENT_HOMEASSISTANT_START,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util.signal_type import SignalType
|
||||
from homeassistant.util.system_info import is_official_image
|
||||
|
||||
DOMAIN = "ffmpeg"
|
||||
|
||||
SERVICE_START = "start"
|
||||
SERVICE_STOP = "stop"
|
||||
SERVICE_RESTART = "restart"
|
||||
|
||||
SIGNAL_FFMPEG_START = SignalType[list[str] | None]("ffmpeg.start")
|
||||
SIGNAL_FFMPEG_STOP = SignalType[list[str] | None]("ffmpeg.stop")
|
||||
SIGNAL_FFMPEG_RESTART = SignalType[list[str] | None]("ffmpeg.restart")
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
SIGNAL_FFMPEG_RESTART,
|
||||
SIGNAL_FFMPEG_START,
|
||||
SIGNAL_FFMPEG_STOP,
|
||||
)
|
||||
from .services import async_setup_services
|
||||
|
||||
DATA_FFMPEG = "ffmpeg"
|
||||
|
||||
@@ -63,8 +56,6 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
SERVICE_FFMPEG_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids})
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the FFmpeg component."""
|
||||
@@ -74,29 +65,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
await manager.async_get_version()
|
||||
|
||||
# Register service
|
||||
async def async_service_handle(service: ServiceCall) -> None:
|
||||
"""Handle service ffmpeg process."""
|
||||
entity_ids: list[str] | None = service.data.get(ATTR_ENTITY_ID)
|
||||
|
||||
if service.service == SERVICE_START:
|
||||
async_dispatcher_send(hass, SIGNAL_FFMPEG_START, entity_ids)
|
||||
elif service.service == SERVICE_STOP:
|
||||
async_dispatcher_send(hass, SIGNAL_FFMPEG_STOP, entity_ids)
|
||||
else:
|
||||
async_dispatcher_send(hass, SIGNAL_FFMPEG_RESTART, entity_ids)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_START, async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_STOP, async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_RESTART, async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
|
||||
)
|
||||
async_setup_services(hass)
|
||||
|
||||
hass.data[DATA_FFMPEG] = manager
|
||||
return True
|
||||

homeassistant/components/ffmpeg/const.py (new file, 9 lines)
@@ -0,0 +1,9 @@
"""Support for FFmpeg."""

from homeassistant.util.signal_type import SignalType

DOMAIN = "ffmpeg"

SIGNAL_FFMPEG_START = SignalType[list[str] | None]("ffmpeg.start")
SIGNAL_FFMPEG_STOP = SignalType[list[str] | None]("ffmpeg.stop")
SIGNAL_FFMPEG_RESTART = SignalType[list[str] | None]("ffmpeg.restart")

homeassistant/components/ffmpeg/services.py (new file, 51 lines)
@@ -0,0 +1,51 @@
"""Support for FFmpeg."""

from __future__ import annotations

import voluptuous as vol

from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send

from .const import (
    DOMAIN,
    SIGNAL_FFMPEG_RESTART,
    SIGNAL_FFMPEG_START,
    SIGNAL_FFMPEG_STOP,
)

SERVICE_START = "start"
SERVICE_STOP = "stop"
SERVICE_RESTART = "restart"

SERVICE_FFMPEG_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids})


async def _async_service_handle(service: ServiceCall) -> None:
    """Handle service ffmpeg process."""
    entity_ids: list[str] | None = service.data.get(ATTR_ENTITY_ID)

    if service.service == SERVICE_START:
        async_dispatcher_send(service.hass, SIGNAL_FFMPEG_START, entity_ids)
    elif service.service == SERVICE_STOP:
        async_dispatcher_send(service.hass, SIGNAL_FFMPEG_STOP, entity_ids)
    else:
        async_dispatcher_send(service.hass, SIGNAL_FFMPEG_RESTART, entity_ids)


def async_setup_services(hass: HomeAssistant) -> None:
    """Register FFmpeg services."""

    hass.services.async_register(
        DOMAIN, SERVICE_START, _async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
    )

    hass.services.async_register(
        DOMAIN, SERVICE_STOP, _async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
    )

    hass.services.async_register(
        DOMAIN, SERVICE_RESTART, _async_service_handle, schema=SERVICE_FFMPEG_SCHEMA
    )
|
||||
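A sketch of the consumer side of the ffmpeg signals now exported from const.py: an entity subscribing to the dispatcher signal that the relocated start service sends. The entity class and its start routine are illustrative, not code from this branch.

from homeassistant.components.ffmpeg.const import SIGNAL_FFMPEG_START
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity


class ExampleFFmpegEntity(Entity):
    """Illustrative entity reacting to ffmpeg.start service calls."""

    async def async_added_to_hass(self) -> None:
        # Unsubscribe automatically when the entity is removed.
        self.async_on_remove(
            async_dispatcher_connect(self.hass, SIGNAL_FFMPEG_START, self._start)
        )

    @callback
    def _start(self, entity_ids: list[str] | None) -> None:
        # The service passes the targeted entity_ids, or None for all entities.
        if entity_ids is None or self.entity_id in entity_ids:
            self.hass.async_create_task(self._async_start_ffmpeg())

    async def _async_start_ffmpeg(self) -> None:
        """Start the ffmpeg process (placeholder)."""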
@@ -28,45 +28,36 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the Fibaro covers."""
|
||||
controller = entry.runtime_data
|
||||
async_add_entities(
|
||||
[FibaroCover(device) for device in controller.fibaro_devices[Platform.COVER]],
|
||||
True,
|
||||
)
|
||||
|
||||
entities: list[FibaroEntity] = []
|
||||
for device in controller.fibaro_devices[Platform.COVER]:
|
||||
# Positionable covers report the position over value
|
||||
if device.value.has_value:
|
||||
entities.append(PositionableFibaroCover(device))
|
||||
else:
|
||||
entities.append(FibaroCover(device))
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
class FibaroCover(FibaroEntity, CoverEntity):
|
||||
"""Representation a Fibaro Cover."""
|
||||
class PositionableFibaroCover(FibaroEntity, CoverEntity):
|
||||
"""Representation of a fibaro cover which supports positioning."""
|
||||
|
||||
def __init__(self, fibaro_device: DeviceModel) -> None:
|
||||
"""Initialize the Vera device."""
|
||||
"""Initialize the device."""
|
||||
super().__init__(fibaro_device)
|
||||
self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)
|
||||
|
||||
if self._is_open_close_only():
|
||||
self._attr_supported_features = (
|
||||
CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
|
||||
)
|
||||
if "stop" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.STOP
|
||||
|
||||
@staticmethod
|
||||
def bound(position):
|
||||
def bound(position: int | None) -> int | None:
|
||||
"""Normalize the position."""
|
||||
if position is None:
|
||||
return None
|
||||
position = int(position)
|
||||
if position <= 5:
|
||||
return 0
|
||||
if position >= 95:
|
||||
return 100
|
||||
return position
|
||||
|
||||
def _is_open_close_only(self) -> bool:
|
||||
"""Return if only open / close is supported."""
|
||||
# Normally positionable devices report the position over value,
|
||||
# so if it is missing we have a device which supports open / close only
|
||||
return not self.fibaro_device.value.has_value
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the state."""
|
||||
super().update()
|
||||
@@ -74,20 +65,15 @@ class FibaroCover(FibaroEntity, CoverEntity):
|
||||
self._attr_current_cover_position = self.bound(self.level)
|
||||
self._attr_current_cover_tilt_position = self.bound(self.level2)
|
||||
|
||||
device_state = self.fibaro_device.state
|
||||
|
||||
# Be aware that opening and closing is only available for some modern
|
||||
# devices.
|
||||
# For example the Fibaro Roller Shutter 4 reports this correctly.
|
||||
if device_state.has_value:
|
||||
self._attr_is_opening = device_state.str_value().lower() == "opening"
|
||||
self._attr_is_closing = device_state.str_value().lower() == "closing"
|
||||
device_state = self.fibaro_device.state.str_value(default="").lower()
|
||||
self._attr_is_opening = device_state == "opening"
|
||||
self._attr_is_closing = device_state == "closing"
|
||||
|
||||
closed: bool | None = None
|
||||
if self._is_open_close_only():
|
||||
if device_state.has_value and device_state.str_value().lower() != "unknown":
|
||||
closed = device_state.str_value().lower() == "closed"
|
||||
elif self.current_cover_position is not None:
|
||||
if self.current_cover_position is not None:
|
||||
closed = self.current_cover_position == 0
|
||||
self._attr_is_closed = closed
|
||||
|
||||
@@ -96,7 +82,7 @@ class FibaroCover(FibaroEntity, CoverEntity):
|
||||
self.set_level(cast(int, kwargs.get(ATTR_POSITION)))
|
||||
|
||||
def set_cover_tilt_position(self, **kwargs: Any) -> None:
|
||||
"""Move the cover to a specific position."""
|
||||
"""Move the slats to a specific position."""
|
||||
self.set_level2(cast(int, kwargs.get(ATTR_TILT_POSITION)))
|
||||
|
||||
def open_cover(self, **kwargs: Any) -> None:
|
||||
@@ -118,3 +104,62 @@ class FibaroCover(FibaroEntity, CoverEntity):
|
||||
def stop_cover(self, **kwargs: Any) -> None:
|
||||
"""Stop the cover."""
|
||||
self.action("stop")
|
||||
|
||||
|
||||
class FibaroCover(FibaroEntity, CoverEntity):
|
||||
"""Representation of a fibaro cover which supports only open / close commands."""
|
||||
|
||||
def __init__(self, fibaro_device: DeviceModel) -> None:
|
||||
"""Initialize the device."""
|
||||
super().__init__(fibaro_device)
|
||||
self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)
|
||||
|
||||
self._attr_supported_features = (
|
||||
CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
|
||||
)
|
||||
if "stop" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.STOP
|
||||
if "rotateSlatsUp" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.OPEN_TILT
|
||||
if "rotateSlatsDown" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.CLOSE_TILT
|
||||
if "stopSlats" in self.fibaro_device.actions:
|
||||
self._attr_supported_features |= CoverEntityFeature.STOP_TILT
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the state."""
|
||||
super().update()
|
||||
|
||||
device_state = self.fibaro_device.state.str_value(default="").lower()
|
||||
|
||||
self._attr_is_opening = device_state == "opening"
|
||||
self._attr_is_closing = device_state == "closing"
|
||||
|
||||
closed: bool | None = None
|
||||
if device_state not in {"", "unknown"}:
|
||||
closed = device_state == "closed"
|
||||
self._attr_is_closed = closed
|
||||
|
||||
def open_cover(self, **kwargs: Any) -> None:
|
||||
"""Open the cover."""
|
||||
self.action("open")
|
||||
|
||||
def close_cover(self, **kwargs: Any) -> None:
|
||||
"""Close the cover."""
|
||||
self.action("close")
|
||||
|
||||
def stop_cover(self, **kwargs: Any) -> None:
|
||||
"""Stop the cover."""
|
||||
self.action("stop")
|
||||
|
||||
def open_cover_tilt(self, **kwargs: Any) -> None:
|
||||
"""Open the cover slats."""
|
||||
self.action("rotateSlatsUp")
|
||||
|
||||
def close_cover_tilt(self, **kwargs: Any) -> None:
|
||||
"""Close the cover slats."""
|
||||
self.action("rotateSlatsDown")
|
||||
|
||||
def stop_cover_tilt(self, **kwargs: Any) -> None:
|
||||
"""Stop the cover slats turning."""
|
||||
self.action("stopSlats")
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20250531.3"]
|
||||
"requirements": ["home-assistant-frontend==20250531.2"]
|
||||
}
|
||||
|
||||
@@ -27,7 +27,7 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Fully Kiosk Browser."""
|
||||
|
||||
await async_setup_services(hass)
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
|
||||
from homeassistant.const import ATTR_DEVICE_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
|
||||
@@ -23,71 +23,73 @@ from .const import (
|
||||
from .coordinator import FullyKioskDataUpdateCoordinator
|
||||
|
||||
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
async def _collect_coordinators(
|
||||
call: ServiceCall,
|
||||
) -> list[FullyKioskDataUpdateCoordinator]:
|
||||
device_ids: list[str] = call.data[ATTR_DEVICE_ID]
|
||||
config_entries = list[ConfigEntry]()
|
||||
registry = dr.async_get(call.hass)
|
||||
for target in device_ids:
|
||||
device = registry.async_get(target)
|
||||
if device:
|
||||
device_entries = list[ConfigEntry]()
|
||||
for entry_id in device.config_entries:
|
||||
entry = call.hass.config_entries.async_get_entry(entry_id)
|
||||
if entry and entry.domain == DOMAIN:
|
||||
device_entries.append(entry)
|
||||
if not device_entries:
|
||||
raise HomeAssistantError(f"Device '{target}' is not a {DOMAIN} device")
|
||||
config_entries.extend(device_entries)
|
||||
else:
|
||||
raise HomeAssistantError(f"Device '{target}' not found in device registry")
|
||||
coordinators = list[FullyKioskDataUpdateCoordinator]()
|
||||
for config_entry in config_entries:
|
||||
if config_entry.state != ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(f"{config_entry.title} is not loaded")
|
||||
coordinators.append(config_entry.runtime_data)
|
||||
return coordinators
|
||||
|
||||
|
||||
async def _async_load_url(call: ServiceCall) -> None:
|
||||
"""Load a URL on the Fully Kiosk Browser."""
|
||||
for coordinator in await _collect_coordinators(call):
|
||||
await coordinator.fully.loadUrl(call.data[ATTR_URL])
|
||||
|
||||
|
||||
async def _async_start_app(call: ServiceCall) -> None:
|
||||
"""Start an app on the device."""
|
||||
for coordinator in await _collect_coordinators(call):
|
||||
await coordinator.fully.startApplication(call.data[ATTR_APPLICATION])
|
||||
|
||||
|
||||
async def _async_set_config(call: ServiceCall) -> None:
|
||||
"""Set a Fully Kiosk Browser config value on the device."""
|
||||
for coordinator in await _collect_coordinators(call):
|
||||
key = call.data[ATTR_KEY]
|
||||
value = call.data[ATTR_VALUE]
|
||||
|
||||
# Fully API has different methods for setting string and bool values.
|
||||
# check if call.data[ATTR_VALUE] is a bool
|
||||
if isinstance(value, bool) or (
|
||||
isinstance(value, str) and value.lower() in ("true", "false")
|
||||
):
|
||||
await coordinator.fully.setConfigurationBool(key, value)
|
||||
else:
|
||||
# Convert any int values to string
|
||||
if isinstance(value, int):
|
||||
value = str(value)
|
||||
|
||||
await coordinator.fully.setConfigurationString(key, value)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the services for the Fully Kiosk Browser integration."""
|
||||
|
||||
async def collect_coordinators(
|
||||
device_ids: list[str],
|
||||
) -> list[FullyKioskDataUpdateCoordinator]:
|
||||
config_entries = list[ConfigEntry]()
|
||||
registry = dr.async_get(hass)
|
||||
for target in device_ids:
|
||||
device = registry.async_get(target)
|
||||
if device:
|
||||
device_entries = list[ConfigEntry]()
|
||||
for entry_id in device.config_entries:
|
||||
entry = hass.config_entries.async_get_entry(entry_id)
|
||||
if entry and entry.domain == DOMAIN:
|
||||
device_entries.append(entry)
|
||||
if not device_entries:
|
||||
raise HomeAssistantError(
|
||||
f"Device '{target}' is not a {DOMAIN} device"
|
||||
)
|
||||
config_entries.extend(device_entries)
|
||||
else:
|
||||
raise HomeAssistantError(
|
||||
f"Device '{target}' not found in device registry"
|
||||
)
|
||||
coordinators = list[FullyKioskDataUpdateCoordinator]()
|
||||
for config_entry in config_entries:
|
||||
if config_entry.state != ConfigEntryState.LOADED:
|
||||
raise HomeAssistantError(f"{config_entry.title} is not loaded")
|
||||
coordinators.append(config_entry.runtime_data)
|
||||
return coordinators
|
||||
|
||||
async def async_load_url(call: ServiceCall) -> None:
|
||||
"""Load a URL on the Fully Kiosk Browser."""
|
||||
for coordinator in await collect_coordinators(call.data[ATTR_DEVICE_ID]):
|
||||
await coordinator.fully.loadUrl(call.data[ATTR_URL])
|
||||
|
||||
async def async_start_app(call: ServiceCall) -> None:
|
||||
"""Start an app on the device."""
|
||||
for coordinator in await collect_coordinators(call.data[ATTR_DEVICE_ID]):
|
||||
await coordinator.fully.startApplication(call.data[ATTR_APPLICATION])
|
||||
|
||||
async def async_set_config(call: ServiceCall) -> None:
|
||||
"""Set a Fully Kiosk Browser config value on the device."""
|
||||
for coordinator in await collect_coordinators(call.data[ATTR_DEVICE_ID]):
|
||||
key = call.data[ATTR_KEY]
|
||||
value = call.data[ATTR_VALUE]
|
||||
|
||||
# Fully API has different methods for setting string and bool values.
|
||||
# check if call.data[ATTR_VALUE] is a bool
|
||||
if isinstance(value, bool) or (
|
||||
isinstance(value, str) and value.lower() in ("true", "false")
|
||||
):
|
||||
await coordinator.fully.setConfigurationBool(key, value)
|
||||
else:
|
||||
# Convert any int values to string
|
||||
if isinstance(value, int):
|
||||
value = str(value)
|
||||
|
||||
await coordinator.fully.setConfigurationString(key, value)
|
||||
|
||||
# Register all the above services
|
||||
service_mapping = [
|
||||
(async_load_url, SERVICE_LOAD_URL, ATTR_URL),
|
||||
(async_start_app, SERVICE_START_APPLICATION, ATTR_APPLICATION),
|
||||
(_async_load_url, SERVICE_LOAD_URL, ATTR_URL),
|
||||
(_async_start_app, SERVICE_START_APPLICATION, ATTR_APPLICATION),
|
||||
]
|
||||
for service_handler, service_name, attrib in service_mapping:
|
||||
hass.services.async_register(
|
||||
@@ -107,7 +109,7 @@ async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_CONFIG,
|
||||
async_set_config,
|
||||
_async_set_config,
|
||||
schema=vol.Schema(
|
||||
vol.All(
|
||||
{
|
||||
|
||||
@@ -10,7 +10,6 @@ from typing import Any
|
||||
import aiohttp
|
||||
from gcal_sync.api import GoogleCalendarService
|
||||
from gcal_sync.exceptions import ApiException, AuthException
|
||||
from gcal_sync.model import DateOrDatetime, Event
|
||||
import voluptuous as vol
|
||||
import yaml
|
||||
|
||||
@@ -21,32 +20,14 @@ from homeassistant.const import (
|
||||
CONF_OFFSET,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
ConfigEntryNotReady,
|
||||
HomeAssistantError,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity import generate_entity_id
|
||||
|
||||
from .api import ApiAuthImpl, get_feature_access
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
EVENT_DESCRIPTION,
|
||||
EVENT_END_DATE,
|
||||
EVENT_END_DATETIME,
|
||||
EVENT_IN,
|
||||
EVENT_IN_DAYS,
|
||||
EVENT_IN_WEEKS,
|
||||
EVENT_LOCATION,
|
||||
EVENT_START_DATE,
|
||||
EVENT_START_DATETIME,
|
||||
EVENT_SUMMARY,
|
||||
EVENT_TYPES_CONF,
|
||||
FeatureAccess,
|
||||
)
|
||||
from .const import DOMAIN
|
||||
from .store import GoogleConfigEntry, GoogleRuntimeData, LocalCalendarStore
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -63,10 +44,6 @@ CONF_MAX_RESULTS = "max_results"
|
||||
|
||||
DEFAULT_CONF_OFFSET = "!!"
|
||||
|
||||
EVENT_CALENDAR_ID = "calendar_id"
|
||||
|
||||
SERVICE_ADD_EVENT = "add_event"
|
||||
|
||||
YAML_DEVICES = f"{DOMAIN}_calendars.yaml"
|
||||
|
||||
PLATFORMS = [Platform.CALENDAR]
|
||||
@@ -100,41 +77,6 @@ DEVICE_SCHEMA = vol.Schema(
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
_EVENT_IN_TYPES = vol.Schema(
|
||||
{
|
||||
vol.Exclusive(EVENT_IN_DAYS, EVENT_TYPES_CONF): cv.positive_int,
|
||||
vol.Exclusive(EVENT_IN_WEEKS, EVENT_TYPES_CONF): cv.positive_int,
|
||||
}
|
||||
)
|
||||
|
||||
ADD_EVENT_SERVICE_SCHEMA = vol.All(
|
||||
cv.has_at_least_one_key(EVENT_START_DATE, EVENT_START_DATETIME, EVENT_IN),
|
||||
cv.has_at_most_one_key(EVENT_START_DATE, EVENT_START_DATETIME, EVENT_IN),
|
||||
{
|
||||
vol.Required(EVENT_CALENDAR_ID): cv.string,
|
||||
vol.Required(EVENT_SUMMARY): cv.string,
|
||||
vol.Optional(EVENT_DESCRIPTION, default=""): cv.string,
|
||||
vol.Optional(EVENT_LOCATION, default=""): cv.string,
|
||||
vol.Inclusive(
|
||||
EVENT_START_DATE, "dates", "Start and end dates must both be specified"
|
||||
): cv.date,
|
||||
vol.Inclusive(
|
||||
EVENT_END_DATE, "dates", "Start and end dates must both be specified"
|
||||
): cv.date,
|
||||
vol.Inclusive(
|
||||
EVENT_START_DATETIME,
|
||||
"datetimes",
|
||||
"Start and end datetimes must both be specified",
|
||||
): cv.datetime,
|
||||
vol.Inclusive(
|
||||
EVENT_END_DATETIME,
|
||||
"datetimes",
|
||||
"Start and end datetimes must both be specified",
|
||||
): cv.datetime,
|
||||
vol.Optional(EVENT_IN): _EVENT_IN_TYPES,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: GoogleConfigEntry) -> bool:
|
||||
"""Set up Google from a config entry."""
|
||||
@@ -190,10 +132,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleConfigEntry) -> bo
|
||||
|
||||
hass.config_entries.async_update_entry(entry, unique_id=primary_calendar.id)
|
||||
|
||||
# Only expose the add event service if we have the correct permissions
|
||||
if get_feature_access(entry) is FeatureAccess.read_write:
|
||||
await async_setup_add_event_service(hass, calendar_service)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(async_reload_entry))
|
||||
@@ -225,79 +163,6 @@ async def async_remove_entry(hass: HomeAssistant, entry: GoogleConfigEntry) -> N
|
||||
await store.async_remove()
|
||||
|
||||
|
||||
async def async_setup_add_event_service(
|
||||
hass: HomeAssistant,
|
||||
calendar_service: GoogleCalendarService,
|
||||
) -> None:
|
||||
"""Add the service to add events."""
|
||||
|
||||
async def _add_event(call: ServiceCall) -> None:
|
||||
"""Add a new event to calendar."""
|
||||
_LOGGER.warning(
|
||||
"The Google Calendar add_event service has been deprecated, and "
|
||||
"will be removed in a future Home Assistant release. Please move "
|
||||
"calls to the create_event service"
|
||||
)
|
||||
|
||||
start: DateOrDatetime | None = None
|
||||
end: DateOrDatetime | None = None
|
||||
|
||||
if EVENT_IN in call.data:
|
||||
if EVENT_IN_DAYS in call.data[EVENT_IN]:
|
||||
now = datetime.now()
|
||||
|
||||
start_in = now + timedelta(days=call.data[EVENT_IN][EVENT_IN_DAYS])
|
||||
end_in = start_in + timedelta(days=1)
|
||||
|
||||
start = DateOrDatetime(date=start_in)
|
||||
end = DateOrDatetime(date=end_in)
|
||||
|
||||
elif EVENT_IN_WEEKS in call.data[EVENT_IN]:
|
||||
now = datetime.now()
|
||||
|
||||
start_in = now + timedelta(weeks=call.data[EVENT_IN][EVENT_IN_WEEKS])
|
||||
end_in = start_in + timedelta(days=1)
|
||||
|
||||
start = DateOrDatetime(date=start_in)
|
||||
end = DateOrDatetime(date=end_in)
|
||||
|
||||
elif EVENT_START_DATE in call.data and EVENT_END_DATE in call.data:
|
||||
start = DateOrDatetime(date=call.data[EVENT_START_DATE])
|
||||
end = DateOrDatetime(date=call.data[EVENT_END_DATE])
|
||||
|
||||
elif EVENT_START_DATETIME in call.data and EVENT_END_DATETIME in call.data:
|
||||
start_dt = call.data[EVENT_START_DATETIME]
|
||||
end_dt = call.data[EVENT_END_DATETIME]
|
||||
start = DateOrDatetime(
|
||||
date_time=start_dt, timezone=str(hass.config.time_zone)
|
||||
)
|
||||
end = DateOrDatetime(date_time=end_dt, timezone=str(hass.config.time_zone))
|
||||
|
||||
if start is None or end is None:
|
||||
raise ValueError(
|
||||
"Missing required fields to set start or end date/datetime"
|
||||
)
|
||||
event = Event(
|
||||
summary=call.data[EVENT_SUMMARY],
|
||||
description=call.data[EVENT_DESCRIPTION],
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
if location := call.data.get(EVENT_LOCATION):
|
||||
event.location = location
|
||||
try:
|
||||
await calendar_service.async_create_event(
|
||||
call.data[EVENT_CALENDAR_ID],
|
||||
event,
|
||||
)
|
||||
except ApiException as err:
|
||||
raise HomeAssistantError(str(err)) from err
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_ADD_EVENT, _add_event, schema=ADD_EVENT_SERVICE_SCHEMA
|
||||
)
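Because the deprecation warning above steers callers toward the create_event service, here is a hedged sketch of an equivalent call; the entity id and concrete field values are illustrative, and the field names are assumed from the calendar integration rather than taken from this diff.

from homeassistant.core import HomeAssistant

async def example_create_event(hass: HomeAssistant) -> None:
    # Roughly what a former add_event caller would do instead, targeting a calendar entity.
    await hass.services.async_call(
        "calendar",
        "create_event",
        {
            "summary": "Dentist",
            "start_date_time": "2025-06-01 09:00:00",
            "end_date_time": "2025-06-01 10:00:00",
        },
        target={"entity_id": "calendar.personal"},
        blocking=True,
    )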
def get_calendar_info(
|
||||
hass: HomeAssistant, calendar: Mapping[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
|
||||
@@ -2,21 +2,13 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
|
||||
import aiohttp
|
||||
from gassist_text import TextAssistant
|
||||
from google.oauth2.credentials import Credentials
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, Platform
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, discovery, intent
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
@@ -31,21 +23,9 @@ from .helpers import (
|
||||
GoogleAssistantSDKConfigEntry,
|
||||
GoogleAssistantSDKRuntimeData,
|
||||
InMemoryStorage,
|
||||
async_send_text_commands,
|
||||
best_matching_language_code,
|
||||
)
|
||||
|
||||
SERVICE_SEND_TEXT_COMMAND = "send_text_command"
|
||||
SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND = "command"
|
||||
SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER = "media_player"
|
||||
SERVICE_SEND_TEXT_COMMAND_SCHEMA = vol.All(
|
||||
{
|
||||
vol.Required(SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND): vol.All(
|
||||
cv.ensure_list, [vol.All(str, vol.Length(min=1))]
|
||||
),
|
||||
vol.Optional(SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER): cv.comp_entity_ids,
|
||||
},
|
||||
)
|
||||
from .services import async_setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
@@ -58,6 +38,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
)
|
||||
)
|
||||
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -81,8 +63,6 @@ async def async_setup_entry(
|
||||
mem_storage = InMemoryStorage(hass)
|
||||
hass.http.register_view(GoogleAssistantSDKAudioView(mem_storage))
|
||||
|
||||
await async_setup_service(hass)
|
||||
|
||||
entry.runtime_data = GoogleAssistantSDKRuntimeData(
|
||||
session=session, mem_storage=mem_storage
|
||||
)
|
||||
@@ -105,36 +85,6 @@ async def async_unload_entry(
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_service(hass: HomeAssistant) -> None:
|
||||
"""Add the services for Google Assistant SDK."""
|
||||
|
||||
async def send_text_command(call: ServiceCall) -> ServiceResponse:
|
||||
"""Send a text command to Google Assistant SDK."""
|
||||
commands: list[str] = call.data[SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND]
|
||||
media_players: list[str] | None = call.data.get(
|
||||
SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER
|
||||
)
|
||||
command_response_list = await async_send_text_commands(
|
||||
hass, commands, media_players
|
||||
)
|
||||
if call.return_response:
|
||||
return {
|
||||
"responses": [
|
||||
dataclasses.asdict(command_response)
|
||||
for command_response in command_response_list
|
||||
]
|
||||
}
|
||||
return None
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SEND_TEXT_COMMAND,
|
||||
send_text_command,
|
||||
schema=SERVICE_SEND_TEXT_COMMAND_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
|
||||
|
||||
|
||||
class GoogleAssistantConversationAgent(conversation.AbstractConversationAgent):
|
||||
"""Google Assistant SDK conversation agent."""
|
||||
|
||||
|
||||
homeassistant/components/google_assistant_sdk/services.py (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
"""Support for Google Assistant SDK."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import dataclasses
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
ServiceCall,
|
||||
ServiceResponse,
|
||||
SupportsResponse,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from .const import DOMAIN
|
||||
from .helpers import async_send_text_commands
|
||||
|
||||
SERVICE_SEND_TEXT_COMMAND = "send_text_command"
|
||||
SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND = "command"
|
||||
SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER = "media_player"
|
||||
SERVICE_SEND_TEXT_COMMAND_SCHEMA = vol.All(
|
||||
{
|
||||
vol.Required(SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND): vol.All(
|
||||
cv.ensure_list, [vol.All(str, vol.Length(min=1))]
|
||||
),
|
||||
vol.Optional(SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER): cv.comp_entity_ids,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def _send_text_command(call: ServiceCall) -> ServiceResponse:
|
||||
"""Send a text command to Google Assistant SDK."""
|
||||
commands: list[str] = call.data[SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND]
|
||||
media_players: list[str] | None = call.data.get(
|
||||
SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER
|
||||
)
|
||||
command_response_list = await async_send_text_commands(
|
||||
call.hass, commands, media_players
|
||||
)
|
||||
if call.return_response:
|
||||
return {
|
||||
"responses": [
|
||||
dataclasses.asdict(command_response)
|
||||
for command_response in command_response_list
|
||||
]
|
||||
}
|
||||
return None
|
||||
|
||||
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Add the services for Google Assistant SDK."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SEND_TEXT_COMMAND,
|
||||
_send_text_command,
|
||||
schema=SERVICE_SEND_TEXT_COMMAND_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
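A hedged usage sketch for the service registered above; the spoken command is made up, and the response shape follows the handler's return value.

from homeassistant.core import HomeAssistant

async def example_send_text_command(hass: HomeAssistant) -> None:
    response = await hass.services.async_call(
        "google_assistant_sdk",
        "send_text_command",
        {"command": ["turn on the kitchen lights"]},
        blocking=True,
        return_response=True,
    )
    # _send_text_command returns {"responses": [...]} when a response is requested.
    print(response)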
@@ -24,9 +24,11 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Google Mail platform."""
|
||||
"""Set up the Google Mail integration."""
|
||||
hass.data.setdefault(DOMAIN, {})[DATA_HASS_CONFIG] = config
|
||||
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -52,8 +54,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -
|
||||
entry, [platform for platform in PLATFORMS if platform != Platform.NOTIFY]
|
||||
)
|
||||
|
||||
await async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING
|
||||
from googleapiclient.http import HttpRequest
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.service import async_extract_config_entry_ids
|
||||
|
||||
@@ -46,56 +46,57 @@ SERVICE_VACATION_SCHEMA = vol.All(
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
async def _extract_gmail_config_entries(
|
||||
call: ServiceCall,
|
||||
) -> list[GoogleMailConfigEntry]:
|
||||
return [
|
||||
entry
|
||||
for entry_id in await async_extract_config_entry_ids(call.hass, call)
|
||||
if (entry := call.hass.config_entries.async_get_entry(entry_id))
|
||||
and entry.domain == DOMAIN
|
||||
]
|
||||
|
||||
|
||||
async def _gmail_service(call: ServiceCall) -> None:
|
||||
"""Call Google Mail service."""
|
||||
for entry in await _extract_gmail_config_entries(call):
|
||||
try:
|
||||
auth = entry.runtime_data
|
||||
except AttributeError as ex:
|
||||
raise ValueError(f"Config entry not loaded: {entry.entry_id}") from ex
|
||||
service = await auth.get_resource()
|
||||
|
||||
_settings = {
|
||||
"enableAutoReply": call.data[ATTR_ENABLED],
|
||||
"responseSubject": call.data.get(ATTR_TITLE),
|
||||
}
|
||||
if contacts := call.data.get(ATTR_RESTRICT_CONTACTS):
|
||||
_settings["restrictToContacts"] = contacts
|
||||
if domain := call.data.get(ATTR_RESTRICT_DOMAIN):
|
||||
_settings["restrictToDomain"] = domain
|
||||
if _date := call.data.get(ATTR_START):
|
||||
_dt = datetime.combine(_date, datetime.min.time())
|
||||
_settings["startTime"] = _dt.timestamp() * 1000
|
||||
if _date := call.data.get(ATTR_END):
|
||||
_dt = datetime.combine(_date, datetime.min.time())
|
||||
_settings["endTime"] = (_dt + timedelta(days=1)).timestamp() * 1000
|
||||
if call.data[ATTR_PLAIN_TEXT]:
|
||||
_settings["responseBodyPlainText"] = call.data[ATTR_MESSAGE]
|
||||
else:
|
||||
_settings["responseBodyHtml"] = call.data[ATTR_MESSAGE]
|
||||
settings: HttpRequest = (
|
||||
service.users().settings().updateVacation(userId=ATTR_ME, body=_settings)
|
||||
)
|
||||
await call.hass.async_add_executor_job(settings.execute)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Google Mail integration."""
|
||||
|
||||
async def extract_gmail_config_entries(
|
||||
call: ServiceCall,
|
||||
) -> list[GoogleMailConfigEntry]:
|
||||
return [
|
||||
entry
|
||||
for entry_id in await async_extract_config_entry_ids(hass, call)
|
||||
if (entry := hass.config_entries.async_get_entry(entry_id))
|
||||
and entry.domain == DOMAIN
|
||||
]
|
||||
|
||||
async def gmail_service(call: ServiceCall) -> None:
|
||||
"""Call Google Mail service."""
|
||||
for entry in await extract_gmail_config_entries(call):
|
||||
try:
|
||||
auth = entry.runtime_data
|
||||
except AttributeError as ex:
|
||||
raise ValueError(f"Config entry not loaded: {entry.entry_id}") from ex
|
||||
service = await auth.get_resource()
|
||||
|
||||
_settings = {
|
||||
"enableAutoReply": call.data[ATTR_ENABLED],
|
||||
"responseSubject": call.data.get(ATTR_TITLE),
|
||||
}
|
||||
if contacts := call.data.get(ATTR_RESTRICT_CONTACTS):
|
||||
_settings["restrictToContacts"] = contacts
|
||||
if domain := call.data.get(ATTR_RESTRICT_DOMAIN):
|
||||
_settings["restrictToDomain"] = domain
|
||||
if _date := call.data.get(ATTR_START):
|
||||
_dt = datetime.combine(_date, datetime.min.time())
|
||||
_settings["startTime"] = _dt.timestamp() * 1000
|
||||
if _date := call.data.get(ATTR_END):
|
||||
_dt = datetime.combine(_date, datetime.min.time())
|
||||
_settings["endTime"] = (_dt + timedelta(days=1)).timestamp() * 1000
|
||||
if call.data[ATTR_PLAIN_TEXT]:
|
||||
_settings["responseBodyPlainText"] = call.data[ATTR_MESSAGE]
|
||||
else:
|
||||
_settings["responseBodyHtml"] = call.data[ATTR_MESSAGE]
|
||||
settings: HttpRequest = (
|
||||
service.users()
|
||||
.settings()
|
||||
.updateVacation(userId=ATTR_ME, body=_settings)
|
||||
)
|
||||
await hass.async_add_executor_job(settings.execute)
|
||||
|
||||
hass.services.async_register(
|
||||
domain=DOMAIN,
|
||||
service=SERVICE_SET_VACATION,
|
||||
schema=SERVICE_VACATION_SCHEMA,
|
||||
service_func=gmail_service,
|
||||
service_func=_gmail_service,
|
||||
)
|
||||
|
||||
@@ -7,17 +7,26 @@ from google_photos_library_api.api import GooglePhotosLibraryApi
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from . import api
|
||||
from .const import DOMAIN
|
||||
from .coordinator import GooglePhotosConfigEntry, GooglePhotosUpdateCoordinator
|
||||
from .services import async_register_services
|
||||
from .services import async_setup_services
|
||||
|
||||
__all__ = [
|
||||
"DOMAIN",
|
||||
]
|
||||
__all__ = ["DOMAIN"]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Google Photos integration."""
|
||||
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -48,8 +57,6 @@ async def async_setup_entry(
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
async_register_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -77,7 +77,7 @@ def _read_file_contents(
|
||||
return results
|
||||
|
||||
|
||||
def async_register_services(hass: HomeAssistant) -> None:
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register Google Photos services."""
|
||||
|
||||
async def async_handle_upload(call: ServiceCall) -> ServiceResponse:
|
||||
@@ -152,11 +152,10 @@ def async_register_services(hass: HomeAssistant) -> None:
|
||||
}
|
||||
return None
|
||||
|
||||
if not hass.services.has_service(DOMAIN, UPLOAD_SERVICE):
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
UPLOAD_SERVICE,
|
||||
async_handle_upload,
|
||||
schema=UPLOAD_SERVICE_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
UPLOAD_SERVICE,
|
||||
async_handle_upload,
|
||||
schema=UPLOAD_SERVICE_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
)
|
||||
|
||||
@@ -2,48 +2,33 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import aiohttp
|
||||
from google.auth.exceptions import RefreshError
|
||||
from google.oauth2.credentials import Credentials
|
||||
from gspread import Client
|
||||
from gspread.exceptions import APIError
|
||||
from gspread.utils import ValueInputOption
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
ConfigEntryNotReady,
|
||||
HomeAssistantError,
|
||||
)
|
||||
from homeassistant.const import CONF_TOKEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
OAuth2Session,
|
||||
async_get_config_entry_implementation,
|
||||
)
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DEFAULT_ACCESS, DOMAIN
|
||||
from .services import async_setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
type GoogleSheetsConfigEntry = ConfigEntry[OAuth2Session]
|
||||
|
||||
DATA = "data"
|
||||
DATA_CONFIG_ENTRY = "config_entry"
|
||||
WORKSHEET = "worksheet"
|
||||
|
||||
SERVICE_APPEND_SHEET = "append_sheet"
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Activate the Google Sheets component."""
|
||||
|
||||
SHEET_SERVICE_SCHEMA = vol.All(
|
||||
{
|
||||
vol.Required(DATA_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Optional(WORKSHEET): cv.string,
|
||||
vol.Required(DATA): vol.Any(cv.ensure_list, [dict]),
|
||||
},
|
||||
)
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -67,8 +52,6 @@ async def async_setup_entry(
|
||||
raise ConfigEntryAuthFailed("Required scopes are not present, reauth required")
|
||||
entry.runtime_data = session
|
||||
|
||||
await async_setup_service(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -81,55 +64,4 @@ async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: GoogleSheetsConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if not hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for service_name in hass.services.async_services_for_domain(DOMAIN):
|
||||
hass.services.async_remove(DOMAIN, service_name)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_service(hass: HomeAssistant) -> None:
|
||||
"""Add the services for Google Sheets."""
|
||||
|
||||
def _append_to_sheet(call: ServiceCall, entry: GoogleSheetsConfigEntry) -> None:
|
||||
"""Run append in the executor."""
|
||||
service = Client(Credentials(entry.data[CONF_TOKEN][CONF_ACCESS_TOKEN])) # type: ignore[no-untyped-call]
|
||||
try:
|
||||
sheet = service.open_by_key(entry.unique_id)
|
||||
except RefreshError:
|
||||
entry.async_start_reauth(hass)
|
||||
raise
|
||||
except APIError as ex:
|
||||
raise HomeAssistantError("Failed to write data") from ex
|
||||
|
||||
worksheet = sheet.worksheet(call.data.get(WORKSHEET, sheet.sheet1.title))
|
||||
columns: list[str] = next(iter(worksheet.get_values("A1:ZZ1")), [])
|
||||
now = str(datetime.now())
|
||||
rows = []
|
||||
for d in call.data[DATA]:
|
||||
row_data = {"created": now} | d
|
||||
row = [row_data.get(column, "") for column in columns]
|
||||
for key, value in row_data.items():
|
||||
if key not in columns:
|
||||
columns.append(key)
|
||||
worksheet.update_cell(1, len(columns), key)
|
||||
row.append(value)
|
||||
rows.append(row)
|
||||
worksheet.append_rows(rows, value_input_option=ValueInputOption.user_entered)
|
||||
|
||||
async def append_to_sheet(call: ServiceCall) -> None:
|
||||
"""Append new line of data to a Google Sheets document."""
|
||||
entry: GoogleSheetsConfigEntry | None = hass.config_entries.async_get_entry(
|
||||
call.data[DATA_CONFIG_ENTRY]
|
||||
)
|
||||
if not entry or not hasattr(entry, "runtime_data"):
|
||||
raise ValueError(f"Invalid config entry: {call.data[DATA_CONFIG_ENTRY]}")
|
||||
await entry.runtime_data.async_ensure_token_valid()
|
||||
await hass.async_add_executor_job(_append_to_sheet, call, entry)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_APPEND_SHEET,
|
||||
append_to_sheet,
|
||||
schema=SHEET_SERVICE_SCHEMA,
|
||||
)
|
||||
|
||||
homeassistant/components/google_sheets/services.py (new file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
"""Support for Google Sheets."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from google.auth.exceptions import RefreshError
|
||||
from google.oauth2.credentials import Credentials
|
||||
from gspread import Client
|
||||
from gspread.exceptions import APIError
|
||||
from gspread.utils import ValueInputOption
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import GoogleSheetsConfigEntry
|
||||
|
||||
DATA = "data"
|
||||
DATA_CONFIG_ENTRY = "config_entry"
|
||||
WORKSHEET = "worksheet"
|
||||
|
||||
SERVICE_APPEND_SHEET = "append_sheet"
|
||||
|
||||
SHEET_SERVICE_SCHEMA = vol.All(
|
||||
{
|
||||
vol.Required(DATA_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Optional(WORKSHEET): cv.string,
|
||||
vol.Required(DATA): vol.Any(cv.ensure_list, [dict]),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def _append_to_sheet(call: ServiceCall, entry: GoogleSheetsConfigEntry) -> None:
|
||||
"""Run append in the executor."""
|
||||
service = Client(Credentials(entry.data[CONF_TOKEN][CONF_ACCESS_TOKEN])) # type: ignore[no-untyped-call]
|
||||
try:
|
||||
sheet = service.open_by_key(entry.unique_id)
|
||||
except RefreshError:
|
||||
entry.async_start_reauth(call.hass)
|
||||
raise
|
||||
except APIError as ex:
|
||||
raise HomeAssistantError("Failed to write data") from ex
|
||||
|
||||
worksheet = sheet.worksheet(call.data.get(WORKSHEET, sheet.sheet1.title))
|
||||
columns: list[str] = next(iter(worksheet.get_values("A1:ZZ1")), [])
|
||||
now = str(datetime.now())
|
||||
rows = []
|
||||
for d in call.data[DATA]:
|
||||
row_data = {"created": now} | d
|
||||
row = [row_data.get(column, "") for column in columns]
|
||||
for key, value in row_data.items():
|
||||
if key not in columns:
|
||||
columns.append(key)
|
||||
worksheet.update_cell(1, len(columns), key)
|
||||
row.append(value)
|
||||
rows.append(row)
|
||||
worksheet.append_rows(rows, value_input_option=ValueInputOption.user_entered)
|
||||
|
||||
|
||||
async def _async_append_to_sheet(call: ServiceCall) -> None:
|
||||
"""Append new line of data to a Google Sheets document."""
|
||||
entry: GoogleSheetsConfigEntry | None = call.hass.config_entries.async_get_entry(
|
||||
call.data[DATA_CONFIG_ENTRY]
|
||||
)
|
||||
if not entry or not hasattr(entry, "runtime_data"):
|
||||
raise ValueError(f"Invalid config entry: {call.data[DATA_CONFIG_ENTRY]}")
|
||||
await entry.runtime_data.async_ensure_token_valid()
|
||||
await call.hass.async_add_executor_job(_append_to_sheet, call, entry)
|
||||
|
||||
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Add the services for Google Sheets."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_APPEND_SHEET,
|
||||
_async_append_to_sheet,
|
||||
schema=SHEET_SERVICE_SCHEMA,
|
||||
)
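A hedged usage sketch for append_sheet as registered above; the config-entry placeholder and row contents are illustrative, while the field names come from the schema in this file.

from homeassistant.core import HomeAssistant

async def example_append_sheet(hass: HomeAssistant) -> None:
    await hass.services.async_call(
        "google_sheets",
        "append_sheet",
        {
            "config_entry": "<config entry id>",
            "worksheet": "Sheet1",
            "data": [{"temperature": 21.5, "humidity": 48}],
        },
        blocking=True,
    )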
@@ -55,7 +55,7 @@ from homeassistant.helpers.issue_registry import (
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
|
||||
|
||||
from .const import CONF_IGNORE_NON_NUMERIC, DOMAIN as GROUP_DOMAIN
|
||||
from .const import CONF_IGNORE_NON_NUMERIC, DOMAIN
|
||||
from .entity import GroupEntity
|
||||
|
||||
DEFAULT_NAME = "Sensor Group"
|
||||
@@ -509,7 +509,7 @@ class SensorGroup(GroupEntity, SensorEntity):
|
||||
return state_classes[0]
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
GROUP_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{self.entity_id}_state_classes_not_matching",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
@@ -566,7 +566,7 @@ class SensorGroup(GroupEntity, SensorEntity):
|
||||
return device_classes[0]
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
GROUP_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{self.entity_id}_device_classes_not_matching",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
@@ -654,7 +654,7 @@ class SensorGroup(GroupEntity, SensorEntity):
|
||||
if device_class:
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
GROUP_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{self.entity_id}_uoms_not_matching_device_class",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
@@ -670,7 +670,7 @@ class SensorGroup(GroupEntity, SensorEntity):
|
||||
else:
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
GROUP_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{self.entity_id}_uoms_not_matching_no_device_class",
|
||||
is_fixable=False,
|
||||
is_persistent=False,
|
||||
|
||||
@@ -144,5 +144,5 @@ class SupervisorEntityModel(StrEnum):
|
||||
ADDON = "Home Assistant Add-on"
|
||||
OS = "Home Assistant Operating System"
|
||||
CORE = "Home Assistant Core"
|
||||
SUPERVIOSR = "Home Assistant Supervisor"
|
||||
SUPERVISOR = "Home Assistant Supervisor"
|
||||
HOST = "Home Assistant Host"
|
||||
|
||||
@@ -261,7 +261,7 @@ def async_register_supervisor_in_dev_reg(
|
||||
params = DeviceInfo(
|
||||
identifiers={(DOMAIN, "supervisor")},
|
||||
manufacturer="Home Assistant",
|
||||
model=SupervisorEntityModel.SUPERVIOSR,
|
||||
model=SupervisorEntityModel.SUPERVISOR,
|
||||
sw_version=supervisor_dict[ATTR_VERSION],
|
||||
name="Home Assistant Supervisor",
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
|
||||
@@ -73,7 +73,9 @@ async def async_setup_entry(
|
||||
class HiveWaterHeater(HiveEntity, WaterHeaterEntity):
|
||||
"""Hive Water Heater Device."""
|
||||
|
||||
_attr_supported_features = WaterHeaterEntityFeature.OPERATION_MODE
|
||||
_attr_supported_features = (
|
||||
WaterHeaterEntityFeature.ON_OFF | WaterHeaterEntityFeature.OPERATION_MODE
|
||||
)
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_operation_list = SUPPORT_WATER_HEATER
|
||||
|
||||
|
||||
@@ -27,6 +27,7 @@ PLATFORMS = [
|
||||
Platform.NUMBER,
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SIREN,
|
||||
Platform.SWITCH,
|
||||
Platform.VALVE,
|
||||
]
|
||||
|
||||
@@ -83,7 +83,7 @@ class HomeeClimate(HomeeNodeEntity, ClimateEntity):
|
||||
if ClimateEntityFeature.TURN_OFF in self.supported_features and (
|
||||
self._heating_mode is not None
|
||||
):
|
||||
if self._heating_mode.current_value == 0:
|
||||
if self._heating_mode.current_value == self._heating_mode.minimum:
|
||||
return HVACMode.OFF
|
||||
|
||||
return HVACMode.HEAT
|
||||
@@ -91,7 +91,10 @@ class HomeeClimate(HomeeNodeEntity, ClimateEntity):
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction:
|
||||
"""Return the hvac action."""
|
||||
if self._heating_mode is not None and self._heating_mode.current_value == 0:
|
||||
if (
|
||||
self._heating_mode is not None
|
||||
and self._heating_mode.current_value == self._heating_mode.minimum
|
||||
):
|
||||
return HVACAction.OFF
|
||||
|
||||
if (
|
||||
@@ -110,10 +113,12 @@ class HomeeClimate(HomeeNodeEntity, ClimateEntity):
|
||||
if (
|
||||
ClimateEntityFeature.PRESET_MODE in self.supported_features
|
||||
and self._heating_mode is not None
|
||||
and self._heating_mode.current_value > 0
|
||||
and self._heating_mode.current_value > self._heating_mode.minimum
|
||||
):
|
||||
assert self._attr_preset_modes is not None
|
||||
return self._attr_preset_modes[int(self._heating_mode.current_value) - 1]
|
||||
return self._attr_preset_modes[
|
||||
int(self._heating_mode.current_value - self._heating_mode.minimum) - 1
|
||||
]
|
||||
|
||||
return PRESET_NONE
|
||||
|
||||
@@ -147,14 +152,16 @@ class HomeeClimate(HomeeNodeEntity, ClimateEntity):
|
||||
# Currently only HEAT and OFF are supported.
|
||||
assert self._heating_mode is not None
|
||||
await self.async_set_homee_value(
|
||||
self._heating_mode, float(hvac_mode == HVACMode.HEAT)
|
||||
self._heating_mode,
|
||||
(hvac_mode == HVACMode.HEAT) + self._heating_mode.minimum,
|
||||
)
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new target preset mode."""
|
||||
assert self._heating_mode is not None and self._attr_preset_modes is not None
|
||||
await self.async_set_homee_value(
|
||||
self._heating_mode, self._attr_preset_modes.index(preset_mode) + 1
|
||||
self._heating_mode,
|
||||
self._attr_preset_modes.index(preset_mode) + self._heating_mode.minimum + 1,
|
||||
)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
@@ -168,12 +175,16 @@ class HomeeClimate(HomeeNodeEntity, ClimateEntity):
|
||||
async def async_turn_on(self) -> None:
|
||||
"""Turn the entity on."""
|
||||
assert self._heating_mode is not None
|
||||
await self.async_set_homee_value(self._heating_mode, 1)
|
||||
await self.async_set_homee_value(
|
||||
self._heating_mode, 1 + self._heating_mode.minimum
|
||||
)
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Turn the entity on."""
|
||||
assert self._heating_mode is not None
|
||||
await self.async_set_homee_value(self._heating_mode, 0)
|
||||
await self.async_set_homee_value(
|
||||
self._heating_mode, 0 + self._heating_mode.minimum
|
||||
)
|
||||
|
||||
|
||||
def get_climate_features(
|
||||
@@ -193,7 +204,10 @@ def get_climate_features(
|
||||
if attribute.maximum > 1:
|
||||
# Node supports more modes than off and heating.
|
||||
features |= ClimateEntityFeature.PRESET_MODE
|
||||
preset_modes.extend([PRESET_ECO, PRESET_BOOST, PRESET_MANUAL])
|
||||
if attribute.maximum < 5:
|
||||
preset_modes.extend([PRESET_ECO, PRESET_BOOST, PRESET_MANUAL])
|
||||
else:
|
||||
preset_modes.extend([PRESET_ECO])
|
||||
|
||||
if len(preset_modes) > 0:
|
||||
preset_modes.insert(0, PRESET_NONE)
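A worked example of the minimum-offset arithmetic introduced above; it only traces the index math with assumed values and an assumed preset order, it is not code from the integration.

minimum = 1  # hypothetical device whose heating-mode attribute starts at 1 instead of 0
preset_modes = ["none", "eco", "boost", "manual"]  # assumed order after PRESET_NONE is inserted

# async_turn_off / async_turn_on write minimum and minimum + 1 respectively.
off_value, heat_value = 0 + minimum, 1 + minimum      # -> 1, 2
# async_set_preset_mode("eco") writes index("eco") + minimum + 1.
eco_value = preset_modes.index("eco") + minimum + 1   # -> 3
# The preset_mode property reads back preset_modes[int(value - minimum) - 1].
assert preset_modes[int(eco_value - minimum) - 1] == "eco"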
@@ -83,3 +83,54 @@ class HomeeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data_schema=AUTH_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the reconfigure flow."""
|
||||
errors: dict[str, str] = {}
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input:
|
||||
self.homee = Homee(
|
||||
user_input[CONF_HOST],
|
||||
reconfigure_entry.data[CONF_USERNAME],
|
||||
reconfigure_entry.data[CONF_PASSWORD],
|
||||
)
|
||||
|
||||
try:
|
||||
await self.homee.get_access_token()
|
||||
except HomeeConnectionFailedException:
|
||||
errors["base"] = "cannot_connect"
|
||||
except HomeeAuthenticationFailedException:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
self.hass.loop.create_task(self.homee.run())
|
||||
await self.homee.wait_until_connected()
|
||||
self.homee.disconnect()
|
||||
await self.homee.wait_until_disconnected()
|
||||
|
||||
await self.async_set_unique_id(self.homee.settings.uid)
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_hub")
|
||||
|
||||
_LOGGER.debug("Updated homee entry with ID %s", self.homee.settings.uid)
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(), data_updates=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
|
||||
): str
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"name": reconfigure_entry.runtime_data.settings.uid
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
homeassistant/components/homee/diagnostics.py (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
"""Diagnostics for homee integration."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from . import DOMAIN, HomeeConfigEntry
|
||||
|
||||
TO_REDACT = [CONF_PASSWORD, CONF_USERNAME, "latitude", "longitude", "wlan_ssid"]
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: HomeeConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
return {
|
||||
"entry_data": async_redact_data(entry.data, TO_REDACT),
|
||||
"settings": async_redact_data(entry.runtime_data.settings.raw_data, TO_REDACT),
|
||||
"devices": [{"node": node.raw_data} for node in entry.runtime_data.nodes],
|
||||
}
|
||||
|
||||
|
||||
async def async_get_device_diagnostics(
|
||||
hass: HomeAssistant, entry: HomeeConfigEntry, device: DeviceEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a device."""
|
||||
|
||||
# Extract node_id from the device identifiers
|
||||
split_uid = next(
|
||||
identifier[1] for identifier in device.identifiers if identifier[0] == DOMAIN
|
||||
).split("-")
|
||||
# Homee hub itself only has MAC as identifier and a node_id of -1
|
||||
node_id = -1 if len(split_uid) < 2 else split_uid[1]
|
||||
|
||||
node = entry.runtime_data.get_node_by_id(int(node_id))
|
||||
assert node is not None
|
||||
return {
|
||||
"homee node": node.raw_data,
|
||||
}
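A small illustration, not from the diff, of what async_redact_data does with keys like the ones in TO_REDACT; the sample payload and the exact redaction marker are assumptions.

from homeassistant.components.diagnostics import async_redact_data

sample = {"username": "me@example.com", "wlan_ssid": "home", "uid": "abc123"}
redacted = async_redact_data(sample, ["username", "wlan_ssid"])
# Expected shape (marker assumed):
# {"username": "**REDACTED**", "wlan_ssid": "**REDACTED**", "uid": "abc123"}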
@@ -31,6 +31,22 @@ class HomeeNumberEntityDescription(NumberEntityDescription):
|
||||
|
||||
|
||||
NUMBER_DESCRIPTIONS = {
|
||||
AttributeType.BUTTON_BRIGHTNESS_ACTIVE: HomeeNumberEntityDescription(
|
||||
key="button_brightness_active",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.BUTTON_BRIGHTNESS_DIMMED: HomeeNumberEntityDescription(
|
||||
key="button_brightness_dimmed",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.DISPLAY_BRIGHTNESS_ACTIVE: HomeeNumberEntityDescription(
|
||||
key="display_brightness_active",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.DISPLAY_BRIGHTNESS_DIMMED: HomeeNumberEntityDescription(
|
||||
key="display_brightness_dimmed",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.DOWN_POSITION: HomeeNumberEntityDescription(
|
||||
key="down_position",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
@@ -48,6 +64,14 @@ NUMBER_DESCRIPTIONS = {
|
||||
key="endposition_configuration",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.EXTERNAL_TEMPERATURE_OFFSET: HomeeNumberEntityDescription(
|
||||
key="external_temperature_offset",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.FLOOR_TEMPERATURE_OFFSET: HomeeNumberEntityDescription(
|
||||
key="floor_temperature_offset",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.MOTION_ALARM_CANCELATION_DELAY: HomeeNumberEntityDescription(
|
||||
key="motion_alarm_cancelation_delay",
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
@@ -83,6 +107,11 @@ NUMBER_DESCRIPTIONS = {
|
||||
key="temperature_offset",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.TEMPERATURE_REPORT_INTERVAL: HomeeNumberEntityDescription(
|
||||
key="temperature_report_interval",
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.UP_TIME: HomeeNumberEntityDescription(
|
||||
key="up_time",
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
|
||||
@@ -14,6 +14,11 @@ from .entity import HomeeEntity
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
SELECT_DESCRIPTIONS: dict[AttributeType, SelectEntityDescription] = {
|
||||
AttributeType.DISPLAY_TEMPERATURE_SELECTION: SelectEntityDescription(
|
||||
key="display_temperature_selection",
|
||||
options=["target", "current"],
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
AttributeType.REPEATER_MODE: SelectEntityDescription(
|
||||
key="repeater_mode",
|
||||
options=["off", "level1", "level2"],
|
||||
|
||||
@@ -129,6 +129,16 @@ SENSOR_DESCRIPTIONS: dict[AttributeType, HomeeSensorEntityDescription] = {
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
AttributeType.EXTERNAL_TEMPERATURE: HomeeSensorEntityDescription(
|
||||
key="external_temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AttributeType.FLOOR_TEMPERATURE: HomeeSensorEntityDescription(
|
||||
key="floor_temperature",
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
AttributeType.INDOOR_RELATIVE_HUMIDITY: HomeeSensorEntityDescription(
|
||||
key="indoor_humidity",
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
|
||||
homeassistant/components/homee/siren.py (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
"""The homee siren platform."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyHomee.const import AttributeType
|
||||
|
||||
from homeassistant.components.siren import SirenEntity, SirenEntityFeature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import HomeeConfigEntry
|
||||
from .entity import HomeeEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: HomeeConfigEntry,
|
||||
async_add_devices: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Add siren entities for homee."""
|
||||
|
||||
async_add_devices(
|
||||
HomeeSiren(attribute, config_entry)
|
||||
for node in config_entry.runtime_data.nodes
|
||||
for attribute in node.attributes
|
||||
if attribute.type == AttributeType.SIREN
|
||||
)
|
||||
|
||||
|
||||
class HomeeSiren(HomeeEntity, SirenEntity):
|
||||
"""Representation of a homee siren device."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_supported_features = SirenEntityFeature.TURN_ON | SirenEntityFeature.TURN_OFF
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return the state of the siren."""
|
||||
return self._attribute.current_value == 1.0
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the siren on."""
|
||||
await self.async_set_homee_value(1)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the siren off."""
|
||||
await self.async_set_homee_value(0)
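A hedged sketch of driving the new siren entity through the standard siren services; the entity id is hypothetical.

from homeassistant.core import HomeAssistant

async def example_trigger_siren(hass: HomeAssistant) -> None:
    # Forwarded to HomeeSiren.async_turn_on, which writes 1 to the SIREN attribute.
    await hass.services.async_call(
        "siren", "turn_on", {"entity_id": "siren.garage_alarm"}, blocking=True
    )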
@@ -2,7 +2,9 @@
|
||||
"config": {
|
||||
"flow_title": "homee {name} ({host})",
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"wrong_hub": "Address belongs to a different homee."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -22,6 +24,16 @@
|
||||
"username": "The username for your homee.",
|
||||
"password": "The password for your homee."
|
||||
}
|
||||
},
|
||||
"reconfigure": {
|
||||
"title": "Reconfigure homee {name}",
|
||||
"description": "Reconfigure the IP address of your homee.",
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The IP address of your homee."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -187,6 +199,18 @@
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"button_brightness_active": {
|
||||
"name": "Button brightness (active)"
|
||||
},
|
||||
"button_brightness_dimmed": {
|
||||
"name": "Button brightness (dimmed)"
|
||||
},
|
||||
"display_brightness_active": {
|
||||
"name": "Display brightness (active)"
|
||||
},
|
||||
"display_brightness_dimmed": {
|
||||
"name": "Display brightness (dimmed)"
|
||||
},
|
||||
"down_position": {
|
||||
"name": "Down position"
|
||||
},
|
||||
@@ -199,6 +223,12 @@
|
||||
"endposition_configuration": {
|
||||
"name": "End position"
|
||||
},
|
||||
"external_temperature_offset": {
|
||||
"name": "External temperature offset"
|
||||
},
|
||||
"floor_temperature_offset": {
|
||||
"name": "Floor temperature offset"
|
||||
},
|
||||
"motion_alarm_cancelation_delay": {
|
||||
"name": "Motion alarm delay"
|
||||
},
|
||||
@@ -223,6 +253,9 @@
|
||||
"temperature_offset": {
|
||||
"name": "Temperature offset"
|
||||
},
|
||||
"temperature_report_interval": {
|
||||
"name": "Temperature report interval"
|
||||
},
|
||||
"up_time": {
|
||||
"name": "Up-movement duration"
|
||||
},
|
||||
@@ -234,6 +267,13 @@
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"display_temperature_selection": {
|
||||
"name": "Displayed temperature",
|
||||
"state": {
|
||||
"target": "Target",
|
||||
"current": "Measured"
|
||||
}
|
||||
},
|
||||
"repeater_mode": {
|
||||
"name": "Repeater mode",
|
||||
"state": {
|
||||
@@ -265,6 +305,12 @@
|
||||
"exhaust_motor_revs": {
|
||||
"name": "Exhaust motor speed"
|
||||
},
|
||||
"external_temperature": {
|
||||
"name": "External temperature"
|
||||
},
|
||||
"floor_temperature": {
|
||||
"name": "Floor temperature"
|
||||
},
|
||||
"indoor_humidity": {
|
||||
"name": "Indoor humidity"
|
||||
},
|
||||
|
||||
@@ -14,6 +14,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiohomekit", "commentjson"],
|
||||
"requirements": ["aiohomekit==3.2.14"],
|
||||
"requirements": ["aiohomekit==3.2.15"],
|
||||
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ from .const import (
HMIPC_NAME,
)
from .hap import HomematicIPConfigEntry, HomematicipHAP
from .services import async_setup_services, async_unload_services
from .services import async_setup_services

CONFIG_SCHEMA = vol.Schema(
{

@@ -63,6 +63,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)
)

await async_setup_services(hass)

return True

@@ -83,7 +85,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomematicIPConfigEntry)
if not await hap.async_setup():
return False

await async_setup_services(hass)
_async_remove_obsolete_entities(hass, entry, hap)

# Register on HA stop event to gracefully shutdown HomematicIP Cloud connection

@@ -115,8 +116,6 @@ async def async_unload_entry(
assert hap.reset_connection_listener is not None
hap.reset_connection_listener()

await async_unload_services(hass)

return await hap.async_reset()

@@ -128,7 +128,6 @@ class HomematicipHAP:
self.config_entry.data.get(HMIPC_AUTHTOKEN),
self.config_entry.data.get(HMIPC_NAME),
)

except HmipcConnectionError as err:
raise ConfigEntryNotReady from err
except Exception as err: # noqa: BLE001

@@ -211,13 +210,41 @@ class HomematicipHAP:
for device in self.home.devices:
device.fire_update_event()

async def async_connect(self, home: AsyncHome) -> None:
"""Connect to HomematicIP Cloud Websocket."""
await home.enable_events()
async def async_connect(self) -> None:
"""Start WebSocket connection."""
tries = 0
while True:
retry_delay = 2 ** min(tries, 8)

home.set_on_connected_handler(self.ws_connected_handler)
home.set_on_disconnected_handler(self.ws_disconnected_handler)
home.set_on_reconnect_handler(self.ws_reconnected_handler)
try:
await self.home.get_current_state_async()
hmip_events = self.home.enable_events()
self.home.set_on_connected_handler(self.ws_connected_handler)
self.home.set_on_disconnected_handler(self.ws_disconnected_handler)
tries = 0
await hmip_events
except HmipConnectionError:
_LOGGER.error(
(
"Error connecting to HomematicIP with HAP %s. "
"Retrying in %d seconds"
),
self.config_entry.unique_id,
retry_delay,
)

if self._ws_close_requested:
break
self._ws_close_requested = False
tries += 1

try:
self._retry_task = self.hass.async_create_task(
asyncio.sleep(retry_delay)
)
await self._retry_task
except asyncio.CancelledError:
break

async def async_reset(self) -> bool:
"""Close the websocket connection."""

@@ -245,22 +272,14 @@ class HomematicipHAP:
async def ws_connected_handler(self) -> None:
"""Handle websocket connected."""
_LOGGER.info("Websocket connection to HomematicIP Cloud established")
_LOGGER.debug("WebSocket connection to HomematicIP established")
if self._ws_connection_closed.is_set():
await self.get_state()
self._ws_connection_closed.clear()

async def ws_disconnected_handler(self) -> None:
"""Handle websocket disconnection."""
_LOGGER.warning("Websocket connection to HomematicIP Cloud closed")
self._ws_connection_closed.set()

async def ws_reconnected_handler(self, reason: str) -> None:
"""Handle websocket reconnection."""
_LOGGER.info(
"Websocket connection to HomematicIP Cloud re-established due to reason: %s",
reason,
)
_LOGGER.warning("WebSocket connection to HomematicIP closed")
self._ws_connection_closed.set()

async def get_hap(

@@ -287,6 +306,6 @@ class HomematicipHAP:
home.on_update(self.async_update)
home.on_create(self.async_create_entity)

await self.async_connect(home)
hass.loop.create_task(self.async_connect())

return home
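The rewritten async_connect above keeps the WebSocket alive with a capped exponential backoff (2 ** min(tries, 8)) and resets the counter after a successful get_current_state_async(). A minimal standalone sketch of that retry pattern, assuming hypothetical connect/listen coroutines rather than the integration's real client API:

import asyncio
import logging
import random

_LOGGER = logging.getLogger(__name__)

MAX_BACKOFF_EXPONENT = 8  # caps the delay at 2**8 = 256 seconds, matching the diff above


async def reconnect_forever(connect, listen, close_requested: asyncio.Event) -> None:
    """Keep a long-lived connection alive with capped exponential backoff.

    `connect` and `listen` are placeholder async callables standing in for
    the real client calls; they are not part of any Home Assistant API.
    """
    tries = 0
    while not close_requested.is_set():
        retry_delay = 2 ** min(tries, MAX_BACKOFF_EXPONENT)
        try:
            await connect()
            tries = 0  # a successful connect resets the backoff
            await listen()  # returns or raises once the connection drops
        except OSError:
            _LOGGER.error("Connection lost, retrying in %d seconds", retry_delay)
            tries += 1
            # Small jitter so many clients do not reconnect in lockstep.
            await asyncio.sleep(retry_delay + random.uniform(0, 1))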
@@ -6,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/homematicip_cloud",
"iot_class": "cloud_push",
"loggers": ["homematicip"],
"requirements": ["homematicip==2.0.5"]
"requirements": ["homematicip==2.0.4"]
}
@@ -123,32 +123,29 @@ SCHEMA_SET_HOME_COOLING_MODE = vol.Schema(
|
||||
async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up the HomematicIP Cloud services."""
|
||||
|
||||
if hass.services.async_services_for_domain(DOMAIN):
|
||||
return
|
||||
|
||||
@verify_domain_control(hass, DOMAIN)
|
||||
async def async_call_hmipc_service(service: ServiceCall) -> None:
|
||||
"""Call correct HomematicIP Cloud service."""
|
||||
service_name = service.service
|
||||
|
||||
if service_name == SERVICE_ACTIVATE_ECO_MODE_WITH_DURATION:
|
||||
await _async_activate_eco_mode_with_duration(hass, service)
|
||||
await _async_activate_eco_mode_with_duration(service)
|
||||
elif service_name == SERVICE_ACTIVATE_ECO_MODE_WITH_PERIOD:
|
||||
await _async_activate_eco_mode_with_period(hass, service)
|
||||
await _async_activate_eco_mode_with_period(service)
|
||||
elif service_name == SERVICE_ACTIVATE_VACATION:
|
||||
await _async_activate_vacation(hass, service)
|
||||
await _async_activate_vacation(service)
|
||||
elif service_name == SERVICE_DEACTIVATE_ECO_MODE:
|
||||
await _async_deactivate_eco_mode(hass, service)
|
||||
await _async_deactivate_eco_mode(service)
|
||||
elif service_name == SERVICE_DEACTIVATE_VACATION:
|
||||
await _async_deactivate_vacation(hass, service)
|
||||
await _async_deactivate_vacation(service)
|
||||
elif service_name == SERVICE_DUMP_HAP_CONFIG:
|
||||
await _async_dump_hap_config(hass, service)
|
||||
await _async_dump_hap_config(service)
|
||||
elif service_name == SERVICE_RESET_ENERGY_COUNTER:
|
||||
await _async_reset_energy_counter(hass, service)
|
||||
await _async_reset_energy_counter(service)
|
||||
elif service_name == SERVICE_SET_ACTIVE_CLIMATE_PROFILE:
|
||||
await _set_active_climate_profile(hass, service)
|
||||
await _set_active_climate_profile(service)
|
||||
elif service_name == SERVICE_SET_HOME_COOLING_MODE:
|
||||
await _async_set_home_cooling_mode(hass, service)
|
||||
await _async_set_home_cooling_mode(service)
|
||||
|
||||
hass.services.async_register(
|
||||
domain=DOMAIN,
|
||||
@@ -217,90 +214,75 @@ async def async_setup_services(hass: HomeAssistant) -> None:
|
||||
)
|
||||
|
||||
|
||||
async def async_unload_services(hass: HomeAssistant):
|
||||
"""Unload HomematicIP Cloud services."""
|
||||
if hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
return
|
||||
|
||||
for hmipc_service in HMIPC_SERVICES:
|
||||
hass.services.async_remove(domain=DOMAIN, service=hmipc_service)
|
||||
|
||||
|
||||
async def _async_activate_eco_mode_with_duration(
|
||||
hass: HomeAssistant, service: ServiceCall
|
||||
) -> None:
|
||||
async def _async_activate_eco_mode_with_duration(service: ServiceCall) -> None:
|
||||
"""Service to activate eco mode with duration."""
|
||||
duration = service.data[ATTR_DURATION]
|
||||
|
||||
if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
|
||||
if home := _get_home(hass, hapid):
|
||||
if home := _get_home(service.hass, hapid):
|
||||
await home.activate_absence_with_duration_async(duration)
|
||||
else:
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
await entry.runtime_data.home.activate_absence_with_duration_async(duration)
|
||||
|
||||
|
||||
async def _async_activate_eco_mode_with_period(
|
||||
hass: HomeAssistant, service: ServiceCall
|
||||
) -> None:
|
||||
async def _async_activate_eco_mode_with_period(service: ServiceCall) -> None:
|
||||
"""Service to activate eco mode with period."""
|
||||
endtime = service.data[ATTR_ENDTIME]
|
||||
|
||||
if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
|
||||
if home := _get_home(hass, hapid):
|
||||
if home := _get_home(service.hass, hapid):
|
||||
await home.activate_absence_with_period_async(endtime)
|
||||
else:
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
await entry.runtime_data.home.activate_absence_with_period_async(endtime)
|
||||
|
||||
|
||||
async def _async_activate_vacation(hass: HomeAssistant, service: ServiceCall) -> None:
|
||||
async def _async_activate_vacation(service: ServiceCall) -> None:
|
||||
"""Service to activate vacation."""
|
||||
endtime = service.data[ATTR_ENDTIME]
|
||||
temperature = service.data[ATTR_TEMPERATURE]
|
||||
|
||||
if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
|
||||
if home := _get_home(hass, hapid):
|
||||
if home := _get_home(service.hass, hapid):
|
||||
await home.activate_vacation_async(endtime, temperature)
|
||||
else:
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
await entry.runtime_data.home.activate_vacation_async(endtime, temperature)
|
||||
|
||||
|
||||
async def _async_deactivate_eco_mode(hass: HomeAssistant, service: ServiceCall) -> None:
|
||||
async def _async_deactivate_eco_mode(service: ServiceCall) -> None:
|
||||
"""Service to deactivate eco mode."""
|
||||
if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
|
||||
if home := _get_home(hass, hapid):
|
||||
if home := _get_home(service.hass, hapid):
|
||||
await home.deactivate_absence_async()
|
||||
else:
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
await entry.runtime_data.home.deactivate_absence_async()
|
||||
|
||||
|
||||
async def _async_deactivate_vacation(hass: HomeAssistant, service: ServiceCall) -> None:
|
||||
async def _async_deactivate_vacation(service: ServiceCall) -> None:
|
||||
"""Service to deactivate vacation."""
|
||||
if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
|
||||
if home := _get_home(hass, hapid):
|
||||
if home := _get_home(service.hass, hapid):
|
||||
await home.deactivate_vacation_async()
|
||||
else:
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
await entry.runtime_data.home.deactivate_vacation_async()
|
||||
|
||||
|
||||
async def _set_active_climate_profile(
|
||||
hass: HomeAssistant, service: ServiceCall
|
||||
) -> None:
|
||||
async def _set_active_climate_profile(service: ServiceCall) -> None:
|
||||
"""Service to set the active climate profile."""
|
||||
entity_id_list = service.data[ATTR_ENTITY_ID]
|
||||
climate_profile_index = service.data[ATTR_CLIMATE_PROFILE_INDEX] - 1
|
||||
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
if entity_id_list != "all":
|
||||
for entity_id in entity_id_list:
|
||||
group = entry.runtime_data.hmip_device_by_entity_id.get(entity_id)
|
||||
@@ -312,16 +294,16 @@ async def _set_active_climate_profile(
|
||||
await group.set_active_profile_async(climate_profile_index)
|
||||
|
||||
|
||||
async def _async_dump_hap_config(hass: HomeAssistant, service: ServiceCall) -> None:
|
||||
async def _async_dump_hap_config(service: ServiceCall) -> None:
|
||||
"""Service to dump the configuration of a Homematic IP Access Point."""
|
||||
config_path: str = (
|
||||
service.data.get(ATTR_CONFIG_OUTPUT_PATH) or hass.config.config_dir
|
||||
service.data.get(ATTR_CONFIG_OUTPUT_PATH) or service.hass.config.config_dir
|
||||
)
|
||||
config_file_prefix = service.data[ATTR_CONFIG_OUTPUT_FILE_PREFIX]
|
||||
anonymize = service.data[ATTR_ANONYMIZE]
|
||||
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
hap_sgtin = entry.unique_id
|
||||
assert hap_sgtin is not None
|
||||
|
||||
@@ -338,12 +320,12 @@ async def _async_dump_hap_config(hass: HomeAssistant, service: ServiceCall) -> N
|
||||
config_file.write_text(json_state, encoding="utf8")
|
||||
|
||||
|
||||
async def _async_reset_energy_counter(hass: HomeAssistant, service: ServiceCall):
|
||||
async def _async_reset_energy_counter(service: ServiceCall):
|
||||
"""Service to reset the energy counter."""
|
||||
entity_id_list = service.data[ATTR_ENTITY_ID]
|
||||
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
if entity_id_list != "all":
|
||||
for entity_id in entity_id_list:
|
||||
device = entry.runtime_data.hmip_device_by_entity_id.get(entity_id)
|
||||
@@ -355,16 +337,16 @@ async def _async_reset_energy_counter(hass: HomeAssistant, service: ServiceCall)
|
||||
await device.reset_energy_counter_async()
|
||||
|
||||
|
||||
async def _async_set_home_cooling_mode(hass: HomeAssistant, service: ServiceCall):
|
||||
async def _async_set_home_cooling_mode(service: ServiceCall):
|
||||
"""Service to set the cooling mode."""
|
||||
cooling = service.data[ATTR_COOLING]
|
||||
|
||||
if hapid := service.data.get(ATTR_ACCESSPOINT_ID):
|
||||
if home := _get_home(hass, hapid):
|
||||
if home := _get_home(service.hass, hapid):
|
||||
await home.set_cooling_async(cooling)
|
||||
else:
|
||||
entry: HomematicIPConfigEntry
|
||||
for entry in hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for entry in service.hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
await entry.runtime_data.home.set_cooling_async(cooling)
|
||||
|
||||
|
||||
|
||||
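The services refactor above removes the explicit hass parameter from every helper and reads the instance from ServiceCall.hass instead, so only the registration entry point still takes hass. A condensed sketch of the resulting shape, with a placeholder "example" domain and "do_something" service that are not real Home Assistant services:

from homeassistant.core import HomeAssistant, ServiceCall

DOMAIN = "example"  # placeholder domain used only for this sketch


async def _async_do_something(service: ServiceCall) -> None:
    """Handle a service call without an explicit hass argument."""
    # The HomeAssistant instance is reachable from the call itself,
    # so helpers no longer need hass threaded through their signatures.
    hass: HomeAssistant = service.hass
    for entry in hass.config_entries.async_loaded_entries(DOMAIN):
        ...  # act on each loaded config entry, as the handlers above do


def async_setup_services(hass: HomeAssistant) -> None:
    """Register the domain services once, at integration setup."""
    hass.services.async_register(DOMAIN, "do_something", _async_do_something)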
@@ -12,6 +12,6 @@
"iot_class": "local_polling",
"loggers": ["homewizard_energy"],
"quality_scale": "platinum",
"requirements": ["python-homewizard-energy==v8.3.2"],
"requirements": ["python-homewizard-energy==8.3.3"],
"zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
}

@@ -39,14 +39,14 @@ def setup_cors(app: Application, origins: list[str]) -> None:
cors = aiohttp_cors.setup(
app,
defaults={
host: aiohttp_cors.ResourceOptions(
host: aiohttp_cors.ResourceOptions( # type: ignore[no-untyped-call]
allow_headers=ALLOWED_CORS_HEADERS, allow_methods="*"
)
for host in origins
},
)

cors_added = set()
cors_added: set[str] = set()

def _allow_cors(
route: AbstractRoute | AbstractResource,

@@ -69,13 +69,13 @@ def setup_cors(app: Application, origins: list[str]) -> None:
if path_str in cors_added:
return

cors.add(route, config)
cors.add(route, config) # type: ignore[arg-type]
cors_added.add(path_str)

app[KEY_ALLOW_ALL_CORS] = lambda route: _allow_cors(
route,
{
"*": aiohttp_cors.ResourceOptions(
"*": aiohttp_cors.ResourceOptions( # type: ignore[no-untyped-call]
allow_headers=ALLOWED_CORS_HEADERS, allow_methods="*"
)
},
@@ -5,12 +5,23 @@ from aiohue.util import normalize_bridge_id
|
||||
from homeassistant.components import persistent_notification
|
||||
from homeassistant.config_entries import SOURCE_IGNORE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .bridge import HueBridge, HueConfigEntry
|
||||
from .const import DOMAIN, SERVICE_HUE_ACTIVATE_SCENE
|
||||
from .const import DOMAIN
|
||||
from .migration import check_migration
|
||||
from .services import async_register_services
|
||||
from .services import async_setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up Hue integration."""
|
||||
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: HueConfigEntry) -> bool:
|
||||
@@ -23,9 +34,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HueConfigEntry) -> bool:
|
||||
if not await bridge.async_initialize_bridge():
|
||||
return False
|
||||
|
||||
# register Hue domain services
|
||||
async_register_services(hass)
|
||||
|
||||
api = bridge.api
|
||||
|
||||
# For backwards compat
|
||||
@@ -106,7 +114,4 @@ async def async_setup_entry(hass: HomeAssistant, entry: HueConfigEntry) -> bool:
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: HueConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_success = await entry.runtime_data.async_reset()
|
||||
if not hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
hass.services.async_remove(DOMAIN, SERVICE_HUE_ACTIVATE_SCENE)
|
||||
return unload_success
|
||||
return await entry.runtime_data.async_reset()
|
||||
|
||||
@@ -25,7 +25,7 @@ from .const import (
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def async_register_services(hass: HomeAssistant) -> None:
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register services for Hue integration."""
|
||||
|
||||
async def hue_activate_scene(call: ServiceCall, skip_reload=True) -> None:
|
||||
@@ -59,21 +59,20 @@ def async_register_services(hass: HomeAssistant) -> None:
|
||||
group_name,
|
||||
)
|
||||
|
||||
if not hass.services.has_service(DOMAIN, SERVICE_HUE_ACTIVATE_SCENE):
|
||||
# Register a local handler for scene activation
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_HUE_ACTIVATE_SCENE,
|
||||
verify_domain_control(hass, DOMAIN)(hue_activate_scene),
|
||||
schema=vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_GROUP_NAME): cv.string,
|
||||
vol.Required(ATTR_SCENE_NAME): cv.string,
|
||||
vol.Optional(ATTR_TRANSITION): cv.positive_int,
|
||||
vol.Optional(ATTR_DYNAMIC): cv.boolean,
|
||||
}
|
||||
),
|
||||
)
|
||||
# Register a local handler for scene activation
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_HUE_ACTIVATE_SCENE,
|
||||
verify_domain_control(hass, DOMAIN)(hue_activate_scene),
|
||||
schema=vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_GROUP_NAME): cv.string,
|
||||
vol.Required(ATTR_SCENE_NAME): cv.string,
|
||||
vol.Optional(ATTR_TRANSITION): cv.positive_int,
|
||||
vol.Optional(ATTR_DYNAMIC): cv.boolean,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def hue_activate_scene_v1(
|
||||
|
||||
@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/hyperion",
"iot_class": "local_push",
"loggers": ["hyperion"],
"requirements": ["hyperion-py==0.7.5"],
"requirements": ["hyperion-py==0.7.6"],
"ssdp": [
{
"manufacturer": "Hyperion Open Source Ambient Lighting",

@@ -82,6 +82,9 @@
},
"usb_capture": {
"name": "Component USB capture"
},
"audio_capture": {
"name": "Component Audio capture"
}
},
"sensor": {

@@ -9,6 +9,7 @@ from hyperion import client
from hyperion.const import (
KEY_COMPONENT,
KEY_COMPONENTID_ALL,
KEY_COMPONENTID_AUDIO,
KEY_COMPONENTID_BLACKBORDER,
KEY_COMPONENTID_BOBLIGHTSERVER,
KEY_COMPONENTID_FORWARDER,

@@ -59,6 +60,7 @@ COMPONENT_SWITCHES = [
KEY_COMPONENTID_GRABBER,
KEY_COMPONENTID_LEDDEVICE,
KEY_COMPONENTID_V4L,
KEY_COMPONENTID_AUDIO,
]

@@ -83,6 +85,7 @@ def _component_to_translation_key(component: str) -> str:
KEY_COMPONENTID_GRABBER: "platform_capture",
KEY_COMPONENTID_LEDDEVICE: "led_device",
KEY_COMPONENTID_V4L: "usb_capture",
KEY_COMPONENTID_AUDIO: "audio_capture",
}[component]
@@ -6,18 +6,31 @@ from typing import Any
|
||||
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .account import IcloudAccount, IcloudConfigEntry
|
||||
from .const import (
|
||||
CONF_GPS_ACCURACY_THRESHOLD,
|
||||
CONF_MAX_INTERVAL,
|
||||
CONF_WITH_FAMILY,
|
||||
DOMAIN,
|
||||
PLATFORMS,
|
||||
STORAGE_KEY,
|
||||
STORAGE_VERSION,
|
||||
)
|
||||
from .services import register_services
|
||||
from .services import async_setup_services
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up iCloud integration."""
|
||||
|
||||
async_setup_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: IcloudConfigEntry) -> bool:
|
||||
@@ -51,8 +64,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: IcloudConfigEntry) -> bo
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
register_services(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -115,8 +115,8 @@ def _get_account(hass: HomeAssistant, account_identifier: str) -> IcloudAccount:
|
||||
return icloud_account
|
||||
|
||||
|
||||
def register_services(hass: HomeAssistant) -> None:
|
||||
"""Set up an iCloud account from a config entry."""
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register iCloud services."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_ICLOUD_PLAY_SOUND, play_sound, schema=SERVICE_SCHEMA_PLAY_SOUND
|
||||
|
||||
homeassistant/components/imeon_inverter/entity.py (new file, 40 lines)
@@ -0,0 +1,40 @@
"""Imeon inverter base class for entities."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import InverterCoordinator

type InverterConfigEntry = ConfigEntry[InverterCoordinator]


class InverterEntity(CoordinatorEntity[InverterCoordinator]):
    """Common elements for all entities."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: InverterCoordinator,
        entry: InverterConfigEntry,
        entity_description: EntityDescription,
    ) -> None:
        """Pass coordinator to CoordinatorEntity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._inverter = coordinator.api.inverter
        self.data_key = entity_description.key
        assert entry.unique_id
        self._attr_unique_id = f"{entry.unique_id}_{self.data_key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, entry.unique_id)},
            name="Imeon inverter",
            manufacturer="Imeon Energy",
            model=self._inverter.get("inverter"),
            sw_version=self._inverter.get("software"),
            serial_number=self._inverter.get("serial"),
            configuration_url=self._inverter.get("url"),
        )
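The new base class centralizes the unique ID and DeviceInfo handling, so a platform class only layers its own behaviour on top. A brief, hypothetical sketch of that usage (the sensor.py rewrite further below follows the same shape):

from homeassistant.components.sensor import SensorEntity

from .entity import InverterEntity  # the new base class introduced above


class ExampleInverterSensor(InverterEntity, SensorEntity):
    """Illustrative sensor that only adds value handling on top of the base."""

    @property
    def native_value(self):
        """Return the coordinator's value for this entity's data key."""
        return self.coordinator.data.get(self.data_key)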
@@ -21,20 +21,18 @@ from homeassistant.const import (
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import InverterCoordinator
|
||||
from .entity import InverterEntity
|
||||
|
||||
type InverterConfigEntry = ConfigEntry[InverterCoordinator]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
ENTITY_DESCRIPTIONS = (
|
||||
SENSOR_DESCRIPTIONS = (
|
||||
# Battery
|
||||
SensorEntityDescription(
|
||||
key="battery_autonomy",
|
||||
@@ -423,42 +421,18 @@ async def async_setup_entry(
|
||||
"""Create each sensor for a given config entry."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
# Init sensor entities
|
||||
async_add_entities(
|
||||
InverterSensor(coordinator, entry, description)
|
||||
for description in ENTITY_DESCRIPTIONS
|
||||
for description in SENSOR_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class InverterSensor(CoordinatorEntity[InverterCoordinator], SensorEntity):
|
||||
"""A sensor that returns numerical values with units."""
|
||||
class InverterSensor(InverterEntity, SensorEntity):
|
||||
"""Representation of an Imeon inverter sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: InverterCoordinator,
|
||||
entry: InverterConfigEntry,
|
||||
description: SensorEntityDescription,
|
||||
) -> None:
|
||||
"""Pass coordinator to CoordinatorEntity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._inverter = coordinator.api.inverter
|
||||
self.data_key = description.key
|
||||
assert entry.unique_id
|
||||
self._attr_unique_id = f"{entry.unique_id}_{self.data_key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, entry.unique_id)},
|
||||
name="Imeon inverter",
|
||||
manufacturer="Imeon Energy",
|
||||
model=self._inverter.get("inverter"),
|
||||
sw_version=self._inverter.get("software"),
|
||||
)
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | None:
|
||||
"""Value of the sensor."""
|
||||
"""Return the state of the entity."""
|
||||
return self.coordinator.data.get(self.data_key)
|
||||
|
||||
@@ -25,9 +25,9 @@ from .const import (
DOMAIN,
INSTEON_PLATFORMS,
)
from .services import async_setup_services
from .utils import (
add_insteon_events,
async_register_services,
get_device_platforms,
register_new_device_callback,
)

@@ -145,7 +145,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
_LOGGER.debug("Insteon device count: %s", len(devices))
register_new_device_callback(hass)
async_register_services(hass)
async_setup_services(hass)

create_insteon_device(hass, devices.modem, entry.entry_id)
homeassistant/components/insteon/services.py (new file, 291 lines)
@@ -0,0 +1,291 @@
|
||||
"""Utilities used by insteon component."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from pyinsteon import devices
|
||||
from pyinsteon.address import Address
|
||||
from pyinsteon.managers.link_manager import (
|
||||
async_enter_linking_mode,
|
||||
async_enter_unlinking_mode,
|
||||
)
|
||||
from pyinsteon.managers.scene_manager import (
|
||||
async_trigger_scene_off,
|
||||
async_trigger_scene_on,
|
||||
)
|
||||
from pyinsteon.managers.x10_manager import (
|
||||
async_x10_all_lights_off,
|
||||
async_x10_all_lights_on,
|
||||
async_x10_all_units_off,
|
||||
)
|
||||
from pyinsteon.x10_address import create as create_x10_address
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_ADDRESS,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_PLATFORM,
|
||||
ENTITY_MATCH_ALL,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
dispatcher_send,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
CONF_CAT,
|
||||
CONF_DIM_STEPS,
|
||||
CONF_HOUSECODE,
|
||||
CONF_SUBCAT,
|
||||
CONF_UNITCODE,
|
||||
DOMAIN,
|
||||
SIGNAL_ADD_DEFAULT_LINKS,
|
||||
SIGNAL_ADD_DEVICE_OVERRIDE,
|
||||
SIGNAL_ADD_X10_DEVICE,
|
||||
SIGNAL_LOAD_ALDB,
|
||||
SIGNAL_PRINT_ALDB,
|
||||
SIGNAL_REMOVE_DEVICE_OVERRIDE,
|
||||
SIGNAL_REMOVE_ENTITY,
|
||||
SIGNAL_REMOVE_HA_DEVICE,
|
||||
SIGNAL_REMOVE_INSTEON_DEVICE,
|
||||
SIGNAL_REMOVE_X10_DEVICE,
|
||||
SIGNAL_SAVE_DEVICES,
|
||||
SRV_ADD_ALL_LINK,
|
||||
SRV_ADD_DEFAULT_LINKS,
|
||||
SRV_ALL_LINK_GROUP,
|
||||
SRV_ALL_LINK_MODE,
|
||||
SRV_CONTROLLER,
|
||||
SRV_DEL_ALL_LINK,
|
||||
SRV_HOUSECODE,
|
||||
SRV_LOAD_ALDB,
|
||||
SRV_LOAD_DB_RELOAD,
|
||||
SRV_PRINT_ALDB,
|
||||
SRV_PRINT_IM_ALDB,
|
||||
SRV_SCENE_OFF,
|
||||
SRV_SCENE_ON,
|
||||
SRV_X10_ALL_LIGHTS_OFF,
|
||||
SRV_X10_ALL_LIGHTS_ON,
|
||||
SRV_X10_ALL_UNITS_OFF,
|
||||
)
|
||||
from .schemas import (
|
||||
ADD_ALL_LINK_SCHEMA,
|
||||
ADD_DEFAULT_LINKS_SCHEMA,
|
||||
DEL_ALL_LINK_SCHEMA,
|
||||
LOAD_ALDB_SCHEMA,
|
||||
PRINT_ALDB_SCHEMA,
|
||||
TRIGGER_SCENE_SCHEMA,
|
||||
X10_HOUSECODE_SCHEMA,
|
||||
)
|
||||
from .utils import print_aldb_to_log
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
|
||||
"""Register services used by insteon component."""
|
||||
|
||||
save_lock = asyncio.Lock()
|
||||
|
||||
async def async_srv_add_all_link(service: ServiceCall) -> None:
|
||||
"""Add an INSTEON All-Link between two devices."""
|
||||
group = service.data[SRV_ALL_LINK_GROUP]
|
||||
mode = service.data[SRV_ALL_LINK_MODE]
|
||||
link_mode = mode.lower() == SRV_CONTROLLER
|
||||
await async_enter_linking_mode(link_mode, group)
|
||||
|
||||
async def async_srv_del_all_link(service: ServiceCall) -> None:
|
||||
"""Delete an INSTEON All-Link between two devices."""
|
||||
group = service.data.get(SRV_ALL_LINK_GROUP)
|
||||
await async_enter_unlinking_mode(group)
|
||||
|
||||
async def async_srv_load_aldb(service: ServiceCall) -> None:
|
||||
"""Load the device All-Link database."""
|
||||
entity_id = service.data[CONF_ENTITY_ID]
|
||||
reload = service.data[SRV_LOAD_DB_RELOAD]
|
||||
if entity_id.lower() == ENTITY_MATCH_ALL:
|
||||
await async_srv_load_aldb_all(reload)
|
||||
else:
|
||||
signal = f"{entity_id}_{SIGNAL_LOAD_ALDB}"
|
||||
async_dispatcher_send(hass, signal, reload)
|
||||
|
||||
async def async_srv_load_aldb_all(reload):
|
||||
"""Load the All-Link database for all devices."""
|
||||
# Cannot be done concurrently due to issues with the underlying protocol.
|
||||
for address in devices:
|
||||
device = devices[address]
|
||||
if device != devices.modem and device.cat != 0x03:
|
||||
await device.aldb.async_load(refresh=reload)
|
||||
await async_srv_save_devices()
|
||||
|
||||
async def async_srv_save_devices():
|
||||
"""Write the Insteon device configuration to file."""
|
||||
async with save_lock:
|
||||
_LOGGER.debug("Saving Insteon devices")
|
||||
await devices.async_save(hass.config.config_dir)
|
||||
|
||||
def print_aldb(service: ServiceCall) -> None:
|
||||
"""Print the All-Link Database for a device."""
|
||||
# For now this sends logs to the log file.
|
||||
# Future direction is to create an INSTEON control panel.
|
||||
entity_id = service.data[CONF_ENTITY_ID]
|
||||
signal = f"{entity_id}_{SIGNAL_PRINT_ALDB}"
|
||||
dispatcher_send(hass, signal)
|
||||
|
||||
def print_im_aldb(service: ServiceCall) -> None:
|
||||
"""Print the All-Link Database for a device."""
|
||||
# For now this sends logs to the log file.
|
||||
# Future direction is to create an INSTEON control panel.
|
||||
print_aldb_to_log(devices.modem.aldb)
|
||||
|
||||
async def async_srv_x10_all_units_off(service: ServiceCall) -> None:
|
||||
"""Send the X10 All Units Off command."""
|
||||
housecode = service.data.get(SRV_HOUSECODE)
|
||||
await async_x10_all_units_off(housecode)
|
||||
|
||||
async def async_srv_x10_all_lights_off(service: ServiceCall) -> None:
|
||||
"""Send the X10 All Lights Off command."""
|
||||
housecode = service.data.get(SRV_HOUSECODE)
|
||||
await async_x10_all_lights_off(housecode)
|
||||
|
||||
async def async_srv_x10_all_lights_on(service: ServiceCall) -> None:
|
||||
"""Send the X10 All Lights On command."""
|
||||
housecode = service.data.get(SRV_HOUSECODE)
|
||||
await async_x10_all_lights_on(housecode)
|
||||
|
||||
async def async_srv_scene_on(service: ServiceCall) -> None:
|
||||
"""Trigger an INSTEON scene ON."""
|
||||
group = service.data.get(SRV_ALL_LINK_GROUP)
|
||||
await async_trigger_scene_on(group)
|
||||
|
||||
async def async_srv_scene_off(service: ServiceCall) -> None:
|
||||
"""Trigger an INSTEON scene ON."""
|
||||
group = service.data.get(SRV_ALL_LINK_GROUP)
|
||||
await async_trigger_scene_off(group)
|
||||
|
||||
@callback
|
||||
def async_add_default_links(service: ServiceCall) -> None:
|
||||
"""Add the default All-Link entries to a device."""
|
||||
entity_id = service.data[CONF_ENTITY_ID]
|
||||
signal = f"{entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}"
|
||||
async_dispatcher_send(hass, signal)
|
||||
|
||||
async def async_add_device_override(override):
|
||||
"""Remove an Insten device and associated entities."""
|
||||
address = Address(override[CONF_ADDRESS])
|
||||
await async_remove_ha_device(address)
|
||||
devices.set_id(address, override[CONF_CAT], override[CONF_SUBCAT], 0)
|
||||
await async_srv_save_devices()
|
||||
|
||||
async def async_remove_device_override(address):
|
||||
"""Remove an Insten device and associated entities."""
|
||||
address = Address(address)
|
||||
await async_remove_ha_device(address)
|
||||
devices.set_id(address, None, None, None)
|
||||
await devices.async_identify_device(address)
|
||||
await async_srv_save_devices()
|
||||
|
||||
@callback
|
||||
def async_add_x10_device(x10_config):
|
||||
"""Add X10 device."""
|
||||
housecode = x10_config[CONF_HOUSECODE]
|
||||
unitcode = x10_config[CONF_UNITCODE]
|
||||
platform = x10_config[CONF_PLATFORM]
|
||||
steps = x10_config.get(CONF_DIM_STEPS, 22)
|
||||
x10_type = "on_off"
|
||||
if platform == "light":
|
||||
x10_type = "dimmable"
|
||||
elif platform == "binary_sensor":
|
||||
x10_type = "sensor"
|
||||
_LOGGER.debug(
|
||||
"Adding X10 device to Insteon: %s %d %s", housecode, unitcode, x10_type
|
||||
)
|
||||
# This must be run in the event loop
|
||||
devices.add_x10_device(housecode, unitcode, x10_type, steps)
|
||||
|
||||
async def async_remove_x10_device(housecode, unitcode):
|
||||
"""Remove an X10 device and associated entities."""
|
||||
address = create_x10_address(housecode, unitcode)
|
||||
devices.pop(address)
|
||||
await async_remove_ha_device(address)
|
||||
|
||||
async def async_remove_ha_device(address: Address, remove_all_refs: bool = False):
|
||||
"""Remove the device and all entities from hass."""
|
||||
signal = f"{address.id}_{SIGNAL_REMOVE_ENTITY}"
|
||||
async_dispatcher_send(hass, signal)
|
||||
dev_registry = dr.async_get(hass)
|
||||
device = dev_registry.async_get_device(identifiers={(DOMAIN, str(address))})
|
||||
if device:
|
||||
dev_registry.async_remove_device(device.id)
|
||||
|
||||
async def async_remove_insteon_device(
|
||||
address: Address, remove_all_refs: bool = False
|
||||
):
|
||||
"""Remove the underlying Insteon device from the network."""
|
||||
await devices.async_remove_device(
|
||||
address=address, force=False, remove_all_refs=remove_all_refs
|
||||
)
|
||||
await async_srv_save_devices()
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_ADD_ALL_LINK, async_srv_add_all_link, schema=ADD_ALL_LINK_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_DEL_ALL_LINK, async_srv_del_all_link, schema=DEL_ALL_LINK_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_LOAD_ALDB, async_srv_load_aldb, schema=LOAD_ALDB_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_PRINT_ALDB, print_aldb, schema=PRINT_ALDB_SCHEMA
|
||||
)
|
||||
hass.services.async_register(DOMAIN, SRV_PRINT_IM_ALDB, print_im_aldb, schema=None)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SRV_X10_ALL_UNITS_OFF,
|
||||
async_srv_x10_all_units_off,
|
||||
schema=X10_HOUSECODE_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SRV_X10_ALL_LIGHTS_OFF,
|
||||
async_srv_x10_all_lights_off,
|
||||
schema=X10_HOUSECODE_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SRV_X10_ALL_LIGHTS_ON,
|
||||
async_srv_x10_all_lights_on,
|
||||
schema=X10_HOUSECODE_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_SCENE_ON, async_srv_scene_on, schema=TRIGGER_SCENE_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_SCENE_OFF, async_srv_scene_off, schema=TRIGGER_SCENE_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SRV_ADD_DEFAULT_LINKS,
|
||||
async_add_default_links,
|
||||
schema=ADD_DEFAULT_LINKS_SCHEMA,
|
||||
)
|
||||
async_dispatcher_connect(hass, SIGNAL_SAVE_DEVICES, async_srv_save_devices)
|
||||
async_dispatcher_connect(
|
||||
hass, SIGNAL_ADD_DEVICE_OVERRIDE, async_add_device_override
|
||||
)
|
||||
async_dispatcher_connect(
|
||||
hass, SIGNAL_REMOVE_DEVICE_OVERRIDE, async_remove_device_override
|
||||
)
|
||||
async_dispatcher_connect(hass, SIGNAL_ADD_X10_DEVICE, async_add_x10_device)
|
||||
async_dispatcher_connect(hass, SIGNAL_REMOVE_X10_DEVICE, async_remove_x10_device)
|
||||
async_dispatcher_connect(hass, SIGNAL_REMOVE_HA_DEVICE, async_remove_ha_device)
|
||||
async_dispatcher_connect(
|
||||
hass, SIGNAL_REMOVE_INSTEON_DEVICE, async_remove_insteon_device
|
||||
)
|
||||
_LOGGER.debug("Insteon Services registered")
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
@@ -12,90 +11,25 @@ from pyinsteon.address import Address
|
||||
from pyinsteon.constants import ALDBStatus, DeviceAction
|
||||
from pyinsteon.device_types.device_base import Device
|
||||
from pyinsteon.events import OFF_EVENT, OFF_FAST_EVENT, ON_EVENT, ON_FAST_EVENT, Event
|
||||
from pyinsteon.managers.link_manager import (
|
||||
async_enter_linking_mode,
|
||||
async_enter_unlinking_mode,
|
||||
)
|
||||
from pyinsteon.managers.scene_manager import (
|
||||
async_trigger_scene_off,
|
||||
async_trigger_scene_on,
|
||||
)
|
||||
from pyinsteon.managers.x10_manager import (
|
||||
async_x10_all_lights_off,
|
||||
async_x10_all_lights_on,
|
||||
async_x10_all_units_off,
|
||||
)
|
||||
from pyinsteon.x10_address import create as create_x10_address
|
||||
from serial.tools import list_ports
|
||||
|
||||
from homeassistant.components import usb
|
||||
from homeassistant.const import (
|
||||
CONF_ADDRESS,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_PLATFORM,
|
||||
ENTITY_MATCH_ALL,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.const import CONF_ADDRESS, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
CONF_CAT,
|
||||
CONF_DIM_STEPS,
|
||||
CONF_HOUSECODE,
|
||||
CONF_SUBCAT,
|
||||
CONF_UNITCODE,
|
||||
DOMAIN,
|
||||
EVENT_CONF_BUTTON,
|
||||
EVENT_GROUP_OFF,
|
||||
EVENT_GROUP_OFF_FAST,
|
||||
EVENT_GROUP_ON,
|
||||
EVENT_GROUP_ON_FAST,
|
||||
SIGNAL_ADD_DEFAULT_LINKS,
|
||||
SIGNAL_ADD_DEVICE_OVERRIDE,
|
||||
SIGNAL_ADD_ENTITIES,
|
||||
SIGNAL_ADD_X10_DEVICE,
|
||||
SIGNAL_LOAD_ALDB,
|
||||
SIGNAL_PRINT_ALDB,
|
||||
SIGNAL_REMOVE_DEVICE_OVERRIDE,
|
||||
SIGNAL_REMOVE_ENTITY,
|
||||
SIGNAL_REMOVE_HA_DEVICE,
|
||||
SIGNAL_REMOVE_INSTEON_DEVICE,
|
||||
SIGNAL_REMOVE_X10_DEVICE,
|
||||
SIGNAL_SAVE_DEVICES,
|
||||
SRV_ADD_ALL_LINK,
|
||||
SRV_ADD_DEFAULT_LINKS,
|
||||
SRV_ALL_LINK_GROUP,
|
||||
SRV_ALL_LINK_MODE,
|
||||
SRV_CONTROLLER,
|
||||
SRV_DEL_ALL_LINK,
|
||||
SRV_HOUSECODE,
|
||||
SRV_LOAD_ALDB,
|
||||
SRV_LOAD_DB_RELOAD,
|
||||
SRV_PRINT_ALDB,
|
||||
SRV_PRINT_IM_ALDB,
|
||||
SRV_SCENE_OFF,
|
||||
SRV_SCENE_ON,
|
||||
SRV_X10_ALL_LIGHTS_OFF,
|
||||
SRV_X10_ALL_LIGHTS_ON,
|
||||
SRV_X10_ALL_UNITS_OFF,
|
||||
)
|
||||
from .ipdb import get_device_platform_groups, get_device_platforms
|
||||
from .schemas import (
|
||||
ADD_ALL_LINK_SCHEMA,
|
||||
ADD_DEFAULT_LINKS_SCHEMA,
|
||||
DEL_ALL_LINK_SCHEMA,
|
||||
LOAD_ALDB_SCHEMA,
|
||||
PRINT_ALDB_SCHEMA,
|
||||
TRIGGER_SCENE_SCHEMA,
|
||||
X10_HOUSECODE_SCHEMA,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .entity import InsteonEntity
|
||||
@@ -154,7 +88,7 @@ def add_insteon_events(hass: HomeAssistant, device: Device) -> None:
|
||||
_register_event(event, async_fire_insteon_event)
|
||||
|
||||
|
||||
def register_new_device_callback(hass):
|
||||
def register_new_device_callback(hass: HomeAssistant) -> None:
|
||||
"""Register callback for new Insteon device."""
|
||||
|
||||
@callback
|
||||
@@ -180,212 +114,6 @@ def register_new_device_callback(hass):
|
||||
devices.subscribe(async_new_insteon_device, force_strong_ref=True)
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_services(hass): # noqa: C901
|
||||
"""Register services used by insteon component."""
|
||||
|
||||
save_lock = asyncio.Lock()
|
||||
|
||||
async def async_srv_add_all_link(service: ServiceCall) -> None:
|
||||
"""Add an INSTEON All-Link between two devices."""
|
||||
group = service.data[SRV_ALL_LINK_GROUP]
|
||||
mode = service.data[SRV_ALL_LINK_MODE]
|
||||
link_mode = mode.lower() == SRV_CONTROLLER
|
||||
await async_enter_linking_mode(link_mode, group)
|
||||
|
||||
async def async_srv_del_all_link(service: ServiceCall) -> None:
|
||||
"""Delete an INSTEON All-Link between two devices."""
|
||||
group = service.data.get(SRV_ALL_LINK_GROUP)
|
||||
await async_enter_unlinking_mode(group)
|
||||
|
||||
async def async_srv_load_aldb(service: ServiceCall) -> None:
|
||||
"""Load the device All-Link database."""
|
||||
entity_id = service.data[CONF_ENTITY_ID]
|
||||
reload = service.data[SRV_LOAD_DB_RELOAD]
|
||||
if entity_id.lower() == ENTITY_MATCH_ALL:
|
||||
await async_srv_load_aldb_all(reload)
|
||||
else:
|
||||
signal = f"{entity_id}_{SIGNAL_LOAD_ALDB}"
|
||||
async_dispatcher_send(hass, signal, reload)
|
||||
|
||||
async def async_srv_load_aldb_all(reload):
|
||||
"""Load the All-Link database for all devices."""
|
||||
# Cannot be done concurrently due to issues with the underlying protocol.
|
||||
for address in devices:
|
||||
device = devices[address]
|
||||
if device != devices.modem and device.cat != 0x03:
|
||||
await device.aldb.async_load(refresh=reload)
|
||||
await async_srv_save_devices()
|
||||
|
||||
async def async_srv_save_devices():
|
||||
"""Write the Insteon device configuration to file."""
|
||||
async with save_lock:
|
||||
_LOGGER.debug("Saving Insteon devices")
|
||||
await devices.async_save(hass.config.config_dir)
|
||||
|
||||
def print_aldb(service: ServiceCall) -> None:
|
||||
"""Print the All-Link Database for a device."""
|
||||
# For now this sends logs to the log file.
|
||||
# Future direction is to create an INSTEON control panel.
|
||||
entity_id = service.data[CONF_ENTITY_ID]
|
||||
signal = f"{entity_id}_{SIGNAL_PRINT_ALDB}"
|
||||
dispatcher_send(hass, signal)
|
||||
|
||||
def print_im_aldb(service: ServiceCall) -> None:
|
||||
"""Print the All-Link Database for a device."""
|
||||
# For now this sends logs to the log file.
|
||||
# Future direction is to create an INSTEON control panel.
|
||||
print_aldb_to_log(devices.modem.aldb)
|
||||
|
||||
async def async_srv_x10_all_units_off(service: ServiceCall) -> None:
|
||||
"""Send the X10 All Units Off command."""
|
||||
housecode = service.data.get(SRV_HOUSECODE)
|
||||
await async_x10_all_units_off(housecode)
|
||||
|
||||
async def async_srv_x10_all_lights_off(service: ServiceCall) -> None:
|
||||
"""Send the X10 All Lights Off command."""
|
||||
housecode = service.data.get(SRV_HOUSECODE)
|
||||
await async_x10_all_lights_off(housecode)
|
||||
|
||||
async def async_srv_x10_all_lights_on(service: ServiceCall) -> None:
|
||||
"""Send the X10 All Lights On command."""
|
||||
housecode = service.data.get(SRV_HOUSECODE)
|
||||
await async_x10_all_lights_on(housecode)
|
||||
|
||||
async def async_srv_scene_on(service: ServiceCall) -> None:
|
||||
"""Trigger an INSTEON scene ON."""
|
||||
group = service.data.get(SRV_ALL_LINK_GROUP)
|
||||
await async_trigger_scene_on(group)
|
||||
|
||||
async def async_srv_scene_off(service: ServiceCall) -> None:
|
||||
"""Trigger an INSTEON scene ON."""
|
||||
group = service.data.get(SRV_ALL_LINK_GROUP)
|
||||
await async_trigger_scene_off(group)
|
||||
|
||||
@callback
|
||||
def async_add_default_links(service: ServiceCall) -> None:
|
||||
"""Add the default All-Link entries to a device."""
|
||||
entity_id = service.data[CONF_ENTITY_ID]
|
||||
signal = f"{entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}"
|
||||
async_dispatcher_send(hass, signal)
|
||||
|
||||
async def async_add_device_override(override):
|
||||
"""Remove an Insten device and associated entities."""
|
||||
address = Address(override[CONF_ADDRESS])
|
||||
await async_remove_ha_device(address)
|
||||
devices.set_id(address, override[CONF_CAT], override[CONF_SUBCAT], 0)
|
||||
await async_srv_save_devices()
|
||||
|
||||
async def async_remove_device_override(address):
|
||||
"""Remove an Insten device and associated entities."""
|
||||
address = Address(address)
|
||||
await async_remove_ha_device(address)
|
||||
devices.set_id(address, None, None, None)
|
||||
await devices.async_identify_device(address)
|
||||
await async_srv_save_devices()
|
||||
|
||||
@callback
|
||||
def async_add_x10_device(x10_config):
|
||||
"""Add X10 device."""
|
||||
housecode = x10_config[CONF_HOUSECODE]
|
||||
unitcode = x10_config[CONF_UNITCODE]
|
||||
platform = x10_config[CONF_PLATFORM]
|
||||
steps = x10_config.get(CONF_DIM_STEPS, 22)
|
||||
x10_type = "on_off"
|
||||
if platform == "light":
|
||||
x10_type = "dimmable"
|
||||
elif platform == "binary_sensor":
|
||||
x10_type = "sensor"
|
||||
_LOGGER.debug(
|
||||
"Adding X10 device to Insteon: %s %d %s", housecode, unitcode, x10_type
|
||||
)
|
||||
# This must be run in the event loop
|
||||
devices.add_x10_device(housecode, unitcode, x10_type, steps)
|
||||
|
||||
async def async_remove_x10_device(housecode, unitcode):
|
||||
"""Remove an X10 device and associated entities."""
|
||||
address = create_x10_address(housecode, unitcode)
|
||||
devices.pop(address)
|
||||
await async_remove_ha_device(address)
|
||||
|
||||
async def async_remove_ha_device(address: Address, remove_all_refs: bool = False):
|
||||
"""Remove the device and all entities from hass."""
|
||||
signal = f"{address.id}_{SIGNAL_REMOVE_ENTITY}"
|
||||
async_dispatcher_send(hass, signal)
|
||||
dev_registry = dr.async_get(hass)
|
||||
device = dev_registry.async_get_device(identifiers={(DOMAIN, str(address))})
|
||||
if device:
|
||||
dev_registry.async_remove_device(device.id)
|
||||
|
||||
async def async_remove_insteon_device(
|
||||
address: Address, remove_all_refs: bool = False
|
||||
):
|
||||
"""Remove the underlying Insteon device from the network."""
|
||||
await devices.async_remove_device(
|
||||
address=address, force=False, remove_all_refs=remove_all_refs
|
||||
)
|
||||
await async_srv_save_devices()
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_ADD_ALL_LINK, async_srv_add_all_link, schema=ADD_ALL_LINK_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_DEL_ALL_LINK, async_srv_del_all_link, schema=DEL_ALL_LINK_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_LOAD_ALDB, async_srv_load_aldb, schema=LOAD_ALDB_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_PRINT_ALDB, print_aldb, schema=PRINT_ALDB_SCHEMA
|
||||
)
|
||||
hass.services.async_register(DOMAIN, SRV_PRINT_IM_ALDB, print_im_aldb, schema=None)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SRV_X10_ALL_UNITS_OFF,
|
||||
async_srv_x10_all_units_off,
|
||||
schema=X10_HOUSECODE_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SRV_X10_ALL_LIGHTS_OFF,
|
||||
async_srv_x10_all_lights_off,
|
||||
schema=X10_HOUSECODE_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SRV_X10_ALL_LIGHTS_ON,
|
||||
async_srv_x10_all_lights_on,
|
||||
schema=X10_HOUSECODE_SCHEMA,
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_SCENE_ON, async_srv_scene_on, schema=TRIGGER_SCENE_SCHEMA
|
||||
)
|
||||
hass.services.async_register(
|
||||
DOMAIN, SRV_SCENE_OFF, async_srv_scene_off, schema=TRIGGER_SCENE_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SRV_ADD_DEFAULT_LINKS,
|
||||
async_add_default_links,
|
||||
schema=ADD_DEFAULT_LINKS_SCHEMA,
|
||||
)
|
||||
async_dispatcher_connect(hass, SIGNAL_SAVE_DEVICES, async_srv_save_devices)
|
||||
async_dispatcher_connect(
|
||||
hass, SIGNAL_ADD_DEVICE_OVERRIDE, async_add_device_override
|
||||
)
|
||||
async_dispatcher_connect(
|
||||
hass, SIGNAL_REMOVE_DEVICE_OVERRIDE, async_remove_device_override
|
||||
)
|
||||
async_dispatcher_connect(hass, SIGNAL_ADD_X10_DEVICE, async_add_x10_device)
|
||||
async_dispatcher_connect(hass, SIGNAL_REMOVE_X10_DEVICE, async_remove_x10_device)
|
||||
async_dispatcher_connect(hass, SIGNAL_REMOVE_HA_DEVICE, async_remove_ha_device)
|
||||
async_dispatcher_connect(
|
||||
hass, SIGNAL_REMOVE_INSTEON_DEVICE, async_remove_insteon_device
|
||||
)
|
||||
_LOGGER.debug("Insteon Services registered")
|
||||
|
||||
|
||||
def print_aldb_to_log(aldb):
|
||||
"""Print the All-Link Database to the log file."""
|
||||
logger = logging.getLogger(f"{__name__}.links")
|
||||
|
||||