Mirror of https://github.com/home-assistant/core.git, synced 2026-01-15 03:57:00 +01:00

Compare commits: light_cond...rc (125 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | e238d67818 |  |
|  | 992a9bdd3b |  |
|  | ceaae1c1cc |  |
|  | 1c163c92dc |  |
|  | a42aa9372c |  |
|  | 013592bd54 |  |
|  | 2101bae095 |  |
|  | cfa1107135 |  |
|  | a269ef660a |  |
|  | c43c4f17e9 |  |
|  | de25e6af51 |  |
|  | 18d3629b6c |  |
|  | 50c477a408 |  |
|  | ea9cd7d905 |  |
|  | 2bf4ac20ea |  |
|  | 94ff881897 |  |
|  | 2975b3c1b9 |  |
|  | 0143c4ff85 |  |
|  | f59566d20b |  |
|  | 395f0ad2a7 |  |
|  | 2af1fc6759 |  |
|  | c1e7122d1c |  |
|  | e5624b1224 |  |
|  | 6e380bafca |  |
|  | bb9fd94430 |  |
|  | 07bc5d5c6b |  |
|  | 651b7116dd |  |
|  | 34438bd039 |  |
|  | 7b53b8691c |  |
|  | 8748d6f200 |  |
|  | 8d95511650 |  |
|  | 9aa5953a86 |  |
|  | 5ccdfda747 |  |
|  | 00ad44cb91 |  |
|  | b7519cd880 |  |
|  | ac44769539 |  |
|  | 9e95b80805 |  |
|  | 50086ca5c7 |  |
|  | 1f28fe9933 |  |
|  | 4465aa264c |  |
|  | 2c1bc96161 |  |
|  | 7127159a5b |  |
|  | 9f0eb6f077 |  |
|  | da19cc06e3 |  |
|  | fd92377cf2 |  |
|  | c201938b8b |  |
|  | b3765204b1 |  |
|  | 786257e051 |  |
|  | 9559634151 |  |
|  | cf12ed8f08 |  |
|  | e213f49c75 |  |
|  | 09c7cc113a |  |
|  | e1e7e039a9 |  |
|  | 05a0f0d23f |  |
|  | d3853019eb |  |
|  | ccbaac55b3 |  |
|  | 771292ced9 |  |
|  | 5d4262e8b3 |  |
|  | d96da9a639 |  |
|  | 288a805d0f |  |
|  | 8e55ceea77 |  |
|  | 14f1d9fbad |  |
|  | eb6582bc24 |  |
|  | 4afe67f33d |  |
|  | 5d7b10f569 |  |
|  | 340c2e48df |  |
|  | 86257b1865 |  |
|  | eea1adccfd |  |
|  | 242be14f88 |  |
|  | 7e013b723d |  |
|  | 4d55939f53 |  |
|  | e5e7546d49 |  |
|  | e560795d04 |  |
|  | 15b0342bd7 |  |
|  | 8d05a5f3d4 |  |
|  | 358ad29b59 |  |
|  | 5c4f99b828 |  |
|  | b3f123c715 |  |
|  | 85c2351af2 |  |
|  | ec19529c99 |  |
|  | d5ebd02afe |  |
|  | 37d82ab795 |  |
|  | 5d08481137 |  |
|  | 0861b7541d |  |
|  | abf7078842 |  |
|  | c4012fae4e |  |
|  | d6082ab6c3 |  |
|  | 77367e415f |  |
|  | 6c006c68c1 |  |
|  | 026fdeb4ce |  |
|  | 1034218e6e |  |
|  | a21062f502 |  |
|  | 2e157f1bc6 |  |
|  | a697e63b8c |  |
|  | d28d55c7db |  |
|  | 8863488286 |  |
|  | 53cfdef1ac |  |
|  | 42ea7ecbd6 |  |
|  | d58d08c350 |  |
|  | 65a259b9df |  |
|  | cbfbfbee13 |  |
|  | e503b37ddc |  |
|  | 217eef39f3 |  |
|  | dcdbce9b21 |  |
|  | 71db8fe185 |  |
|  | 9b96cb66d5 |  |
|  | 78bccbbbc2 |  |
|  | b0a8f9575c |  |
|  | 61104a9970 |  |
|  | 8d13dbdd0c |  |
|  | 9afb41004e |  |
|  | cdd542f6e6 |  |
|  | f520686002 |  |
|  | e4d09bb615 |  |
|  | 10f6ccf6cc |  |
|  | d9fa67b16f |  |
|  | cf228ae02b |  |
|  | cb4d62ab9a |  |
|  | d2f75aec04 |  |
|  | a609fbc07b |  |
|  | 1b9c7ae0ac |  |
|  | 492f2117fb |  |
|  | 2346f83635 |  |
|  | 8925bfb182 |  |
|  | 8f2b1f0eff |  |
@@ -40,8 +40,7 @@
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
// Pyright type checking is not compatible with mypy which Home Assistant uses for type checking
"python.analysis.typeCheckingMode": "off",
"python.analysis.typeCheckingMode": "basic",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,

.github/copilot-instructions.md (vendored, 4 changes)

@@ -847,8 +847,8 @@ rules:
## Development Commands

### Code Quality & Linting
- **Run all linters on all files**: `prek run --all-files`
- **Run linters on staged files only**: `prek run`
- **Run all linters on all files**: `pre-commit run --all-files`
- **Run linters on staged files only**: `pre-commit run`
- **PyLint on everything** (slow): `pylint homeassistant`
- **PyLint on specific folder**: `pylint homeassistant/components/my_integration`
- **MyPy type checking (whole project)**: `mypy homeassistant/`
.github/workflows/ci.yaml (vendored, 181 changes)
@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.2"
HA_SHORT_VERSION: "2026.1"
DEFAULT_PYTHON: "3.13.11"
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
# 10.3 is the oldest supported version
@@ -59,6 +59,7 @@ env:
# 15 is the latest version
# - 15.2 is the latest (as of 9 Feb 2023)
POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
UV_CACHE_DIR: /tmp/uv-cache
APT_CACHE_BASE: /home/runner/work/apt
APT_CACHE_DIR: /home/runner/work/apt/cache
@@ -82,6 +83,7 @@ jobs:
integrations_glob: ${{ steps.info.outputs.integrations_glob }}
integrations: ${{ steps.integrations.outputs.changes }}
apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }}
pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
requirements: ${{ steps.core.outputs.requirements }}
mariadb_groups: ${{ steps.info.outputs.mariadb_groups }}
@@ -109,6 +111,11 @@ jobs:
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}-${{
hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT
- name: Generate partial pre-commit restore key
id: generate_pre-commit_cache_key
run: >-
echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
- name: Generate partial apt restore key
id: generate_apt_cache_key
run: |
@@ -237,8 +244,8 @@ jobs:
echo "skip_coverage: ${skip_coverage}"
echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT

prek:
name: Run prek checks
pre-commit:
name: Prepare pre-commit base
runs-on: *runs-on-ubuntu
needs: [info]
if: |
@@ -247,23 +254,147 @@ jobs:
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
steps:
|
||||
- *checkout
|
||||
- &setup-python-default
|
||||
name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: venv
|
||||
key: &key-pre-commit-venv >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
python --version
|
||||
pip install "$(grep '^uv' < requirements.txt)"
|
||||
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: *actions-cache
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
lookup-only: true
|
||||
key: &key-pre-commit-env >-
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Install pre-commit dependencies
|
||||
if: steps.cache-precommit.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit install-hooks
|
||||
|
||||
lint-ruff-format:
|
||||
name: Check ruff-format
|
||||
runs-on: *runs-on-ubuntu
|
||||
needs: &needs-pre-commit
|
||||
- info
|
||||
- pre-commit
|
||||
steps:
|
||||
- *checkout
|
||||
- *setup-python-default
|
||||
- &cache-restore-pre-commit-venv
|
||||
name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
key: *key-pre-commit-venv
|
||||
- &cache-restore-pre-commit-env
|
||||
name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: *actions-cache-restore
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
key: *key-pre-commit-env
|
||||
- name: Run ruff-format
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
|
||||
env:
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
|
||||
lint-ruff:
|
||||
name: Check ruff
|
||||
runs-on: *runs-on-ubuntu
|
||||
needs: *needs-pre-commit
|
||||
steps:
|
||||
- *checkout
|
||||
- *setup-python-default
|
||||
- *cache-restore-pre-commit-venv
|
||||
- *cache-restore-pre-commit-env
|
||||
- name: Run ruff
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
|
||||
env:
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
|
||||
lint-other:
|
||||
name: Check other linters
|
||||
runs-on: *runs-on-ubuntu
|
||||
needs: *needs-pre-commit
|
||||
steps:
|
||||
- *checkout
|
||||
- *setup-python-default
|
||||
- *cache-restore-pre-commit-venv
|
||||
- *cache-restore-pre-commit-env
|
||||
|
||||
- name: Register yamllint problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
|
||||
- name: Run yamllint
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --hook-stage manual yamllint --all-files --show-diff-on-failure
|
||||
|
||||
- name: Register check-json problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/check-json.json"
|
||||
- name: Run check-json
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --hook-stage manual check-json --all-files --show-diff-on-failure
|
||||
|
||||
- name: Run prettier (fully)
|
||||
if: needs.info.outputs.test_full_suite == 'true'
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --hook-stage manual prettier --all-files --show-diff-on-failure
|
||||
|
||||
- name: Run prettier (partially)
|
||||
if: needs.info.outputs.test_full_suite == 'false'
|
||||
shell: bash
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
shopt -s globstar
|
||||
pre-commit run --hook-stage manual prettier --show-diff-on-failure --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}
|
||||
|
||||
- name: Register check executables problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||
- name: Run executables check
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files --show-diff-on-failure
|
||||
|
||||
- name: Register codespell problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/codespell.json"
|
||||
- name: Run prek
|
||||
uses: j178/prek-action@9d6a3097e0c1865ecce00cfb89fe80f2ee91b547 # v1.0.12
|
||||
env:
|
||||
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
|
||||
RUFF_OUTPUT_FORMAT: github
|
||||
- name: Run codespell
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files
|
||||
|
||||
lint-hadolint:
|
||||
name: Check ${{ matrix.file }}
|
||||
@@ -303,7 +434,7 @@ jobs:
|
||||
- &setup-python-matrix
|
||||
name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: *actions-setup-python
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -316,7 +447,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: *actions-cache
|
||||
with:
|
||||
path: venv
|
||||
key: &key-python-venv >-
|
||||
@@ -431,7 +562,7 @@ jobs:
|
||||
steps:
|
||||
- &cache-restore-apt
|
||||
name: Restore apt cache
|
||||
uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: *actions-cache-restore
|
||||
with:
|
||||
path: *path-apt-cache
|
||||
fail-on-cache-miss: true
|
||||
@@ -448,13 +579,7 @@ jobs:
|
||||
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
|
||||
libturbojpeg
|
||||
- *checkout
|
||||
- &setup-python-default
|
||||
name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: *actions-setup-python
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- *setup-python-default
|
||||
- &cache-restore-python-default
|
||||
name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
@@ -657,7 +782,9 @@ jobs:
|
||||
- base
|
||||
- gen-requirements-all
|
||||
- hassfest
|
||||
- prek
|
||||
- lint-other
|
||||
- lint-ruff
|
||||
- lint-ruff-format
|
||||
- mypy
|
||||
steps:
|
||||
- *cache-restore-apt
|
||||
@@ -696,7 +823,9 @@ jobs:
|
||||
- base
|
||||
- gen-requirements-all
|
||||
- hassfest
|
||||
- prek
|
||||
- lint-other
|
||||
- lint-ruff
|
||||
- lint-ruff-format
|
||||
- mypy
|
||||
- prepare-pytest-full
|
||||
if: |
|
||||
@@ -820,7 +949,9 @@ jobs:
|
||||
- base
|
||||
- gen-requirements-all
|
||||
- hassfest
|
||||
- prek
|
||||
- lint-other
|
||||
- lint-ruff
|
||||
- lint-ruff-format
|
||||
- mypy
|
||||
if: |
|
||||
needs.info.outputs.lint_only != 'true'
|
||||
@@ -935,7 +1066,9 @@ jobs:
|
||||
- base
|
||||
- gen-requirements-all
|
||||
- hassfest
|
||||
- prek
|
||||
- lint-other
|
||||
- lint-ruff
|
||||
- lint-ruff-format
|
||||
- mypy
|
||||
if: |
|
||||
needs.info.outputs.lint_only != 'true'
|
||||
@@ -1069,7 +1202,9 @@ jobs:
|
||||
- base
|
||||
- gen-requirements-all
|
||||
- hassfest
|
||||
- prek
|
||||
- lint-other
|
||||
- lint-ruff
|
||||
- lint-ruff-format
|
||||
- mypy
|
||||
if: |
|
||||
needs.info.outputs.lint_only != 'true'
|
||||
|
||||
.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: "/language:python"
@@ -39,14 +39,14 @@ repos:
- id: prettier
additional_dependencies:
- prettier@3.6.2
- prettier-plugin-sort-json@4.2.0
- prettier-plugin-sort-json@4.1.1
- repo: https://github.com/cdce8p/python-typing-update
rev: v0.6.0
hooks:
# Run `python-typing-update` hook manually from time to time
# to update python typing syntax.
# Will require manual work, before submitting changes!
# prek run --hook-stage manual python-typing-update --all-files
# pre-commit run --hook-stage manual python-typing-update --all-files
- id: python-typing-update
stages: [manual]
args:
@@ -407,7 +407,6 @@ homeassistant.components.person.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.pooldose.*
homeassistant.components.portainer.*
homeassistant.components.powerfox.*
homeassistant.components.powerwall.*
.vscode/settings.default.jsonc (vendored, 4 changes)

@@ -7,8 +7,8 @@
"python.testing.pytestEnabled": false,
// https://code.visualstudio.com/docs/python/linting#_general-settings
"pylint.importStrategy": "fromEnvironment",
// Pyright type checking is not compatible with mypy which Home Assistant uses for type checking
"python.analysis.typeCheckingMode": "off",
// Pyright is too pedantic for Home Assistant
"python.analysis.typeCheckingMode": "basic",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff",
},
.vscode/tasks.json (vendored, 8 changes)

@@ -45,7 +45,7 @@
{
"label": "Ruff",
"type": "shell",
"command": "prek run ruff-check --all-files",
"command": "pre-commit run ruff-check --all-files",
"group": {
"kind": "test",
"isDefault": true
@@ -57,9 +57,9 @@
"problemMatcher": []
},
{
"label": "Prek",
"label": "Pre-commit",
"type": "shell",
"command": "prek run --show-diff-on-failure",
"command": "pre-commit run --show-diff-on-failure",
"group": {
"kind": "test",
"isDefault": true
@@ -120,7 +120,7 @@
{
"label": "Generate Requirements",
"type": "shell",
"command": "${command:python.interpreterPath} -m script.gen_requirements_all",
"command": "./script/gen_requirements_all.py",
"group": {
"kind": "build",
"isDefault": true
CODEOWNERS (generated, 7 changes)

@@ -661,8 +661,6 @@ build.json @home-assistant/supervisor
/tests/components/harmony/ @ehendrix23 @bdraco @mkeesey @Aohzan
/homeassistant/components/hassio/ @home-assistant/supervisor
/tests/components/hassio/ @home-assistant/supervisor
/homeassistant/components/hdfury/ @glenndehaan
/tests/components/hdfury/ @glenndehaan
/homeassistant/components/hdmi_cec/ @inytar
/tests/components/hdmi_cec/ @inytar
/homeassistant/components/heatmiser/ @andylockran
@@ -1068,8 +1066,6 @@ build.json @home-assistant/supervisor
/tests/components/myuplink/ @pajzo @astrandb
/homeassistant/components/nam/ @bieniu
/tests/components/nam/ @bieniu
/homeassistant/components/namecheapdns/ @tr4nt0r
/tests/components/namecheapdns/ @tr4nt0r
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nasweb/ @nasWebio
@@ -1174,8 +1170,6 @@ build.json @home-assistant/supervisor
/tests/components/open_router/ @joostlek
/homeassistant/components/openerz/ @misialq
/tests/components/openerz/ @misialq
/homeassistant/components/openevse/ @c00w @firstof9
/tests/components/openevse/ @c00w @firstof9
/homeassistant/components/openexchangerates/ @MartinHjelmare
/tests/components/openexchangerates/ @MartinHjelmare
/homeassistant/components/opengarage/ @danielhiversen
@@ -1807,7 +1801,6 @@ build.json @home-assistant/supervisor
/tests/components/waqi/ @joostlek
/homeassistant/components/water_heater/ @home-assistant/core
/tests/components/water_heater/ @home-assistant/core
/homeassistant/components/waterfurnace/ @sdague @masterkoppa
/homeassistant/components/watergate/ @adam-the-hero
/tests/components/watergate/ @adam-the-hero
/homeassistant/components/watson_tts/ @rutkai
@@ -7,12 +7,7 @@ from homeassistant.core import HomeAssistant

from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator

PLATFORMS: list[Platform] = [
Platform.BUTTON,
Platform.CLIMATE,
Platform.NUMBER,
Platform.SENSOR,
]
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
@@ -1,96 +0,0 @@
|
||||
"""Button platform for Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from pyairobotrest.exceptions import (
|
||||
AirobotConnectionError,
|
||||
AirobotError,
|
||||
AirobotTimeoutError,
|
||||
)
|
||||
|
||||
from homeassistant.components.button import (
|
||||
ButtonDeviceClass,
|
||||
ButtonEntity,
|
||||
ButtonEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
|
||||
from .entity import AirobotEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AirobotButtonEntityDescription(ButtonEntityDescription):
|
||||
"""Describes Airobot button entity."""
|
||||
|
||||
press_fn: Callable[[AirobotDataUpdateCoordinator], Coroutine[Any, Any, None]]
|
||||
|
||||
|
||||
BUTTON_TYPES: tuple[AirobotButtonEntityDescription, ...] = (
|
||||
AirobotButtonEntityDescription(
|
||||
key="restart",
|
||||
device_class=ButtonDeviceClass.RESTART,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
press_fn=lambda coordinator: coordinator.client.reboot_thermostat(),
|
||||
),
|
||||
AirobotButtonEntityDescription(
|
||||
key="recalibrate_co2",
|
||||
translation_key="recalibrate_co2",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
press_fn=lambda coordinator: coordinator.client.recalibrate_co2_sensor(),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AirobotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airobot button entities."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AirobotButton(coordinator, description) for description in BUTTON_TYPES
|
||||
)
|
||||
|
||||
|
||||
class AirobotButton(AirobotEntity, ButtonEntity):
|
||||
"""Representation of an Airobot button."""
|
||||
|
||||
entity_description: AirobotButtonEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirobotDataUpdateCoordinator,
|
||||
description: AirobotButtonEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the button."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle the button press."""
|
||||
try:
|
||||
await self.entity_description.press_fn(self.coordinator)
|
||||
except (AirobotConnectionError, AirobotTimeoutError):
|
||||
# Connection errors during reboot are expected as device restarts
|
||||
pass
|
||||
except AirobotError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="button_press_failed",
|
||||
translation_placeholders={"button": self.entity_description.key},
|
||||
) from err
|
||||
@@ -1,10 +1,5 @@
{
"entity": {
"button": {
"recalibrate_co2": {
"default": "mdi:molecule-co2"
}
},
"number": {
"hysteresis_band": {
"default": "mdi:delta"
@@ -12,6 +12,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pyairobotrest"],
"quality_scale": "gold",
"quality_scale": "silver",
"requirements": ["pyairobotrest==0.2.0"]
}
@@ -43,7 +43,7 @@ rules:
discovery-update-info: done
discovery: done
docs-data-update: done
docs-examples: done
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
@@ -59,11 +59,6 @@
}
},
"entity": {
"button": {
"recalibrate_co2": {
"name": "Recalibrate CO2 sensor"
}
},
"number": {
"hysteresis_band": {
"name": "Hysteresis band"
@@ -91,9 +86,6 @@
"authentication_failed": {
"message": "Authentication failed, please reauthenticate."
},
"button_press_failed": {
"message": "Failed to press {button} button."
},
"connection_failed": {
"message": "Failed to communicate with device."
},
@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["airos==0.6.1"]
"requirements": ["airos==0.6.0"]
}
@@ -85,22 +85,6 @@ class AirzoneSystemEntity(AirzoneEntity):
value = system[key]
return value

async def _async_update_sys_params(self, params: dict[str, Any]) -> None:
"""Send system parameters to API."""
_params = {
API_SYSTEM_ID: self.system_id,
**params,
}
_LOGGER.debug("update_sys_params=%s", _params)
try:
await self.coordinator.airzone.set_sys_parameters(_params)
except AirzoneError as error:
raise HomeAssistantError(
f"Failed to set system {self.entity_id}: {error}"
) from error

self.coordinator.async_set_updated_data(self.coordinator.airzone.data())


class AirzoneHotWaterEntity(AirzoneEntity):
"""Define an Airzone Hot Water entity."""
@@ -12,5 +12,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioairzone"],
"requirements": ["aioairzone==1.0.5"]
"requirements": ["aioairzone==1.0.4"]
}
@@ -20,7 +20,6 @@ from aioairzone.const import (
|
||||
AZD_MODES,
|
||||
AZD_Q_ADAPT,
|
||||
AZD_SLEEP,
|
||||
AZD_SYSTEMS,
|
||||
AZD_ZONES,
|
||||
)
|
||||
|
||||
@@ -31,7 +30,7 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AirzoneConfigEntry, AirzoneUpdateCoordinator
|
||||
from .entity import AirzoneEntity, AirzoneSystemEntity, AirzoneZoneEntity
|
||||
from .entity import AirzoneEntity, AirzoneZoneEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
@@ -86,18 +85,6 @@ def main_zone_options(
|
||||
return [k for k, v in options.items() if v in modes]
|
||||
|
||||
|
||||
SYSTEM_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
|
||||
AirzoneSelectDescription(
|
||||
api_param=API_Q_ADAPT,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
key=AZD_Q_ADAPT,
|
||||
options=list(Q_ADAPT_DICT),
|
||||
options_dict=Q_ADAPT_DICT,
|
||||
translation_key="q_adapt",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
|
||||
AirzoneSelectDescription(
|
||||
api_param=API_MODE,
|
||||
@@ -106,6 +93,14 @@ MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
|
||||
options_fn=main_zone_options,
|
||||
translation_key="modes",
|
||||
),
|
||||
AirzoneSelectDescription(
|
||||
api_param=API_Q_ADAPT,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
key=AZD_Q_ADAPT,
|
||||
options=list(Q_ADAPT_DICT),
|
||||
options_dict=Q_ADAPT_DICT,
|
||||
translation_key="q_adapt",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -145,37 +140,16 @@ async def async_setup_entry(
|
||||
"""Add Airzone select from a config_entry."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
added_systems: set[str] = set()
|
||||
added_zones: set[str] = set()
|
||||
|
||||
def _async_entity_listener() -> None:
|
||||
"""Handle additions of select."""
|
||||
|
||||
entities: list[AirzoneBaseSelect] = []
|
||||
|
||||
systems_data = coordinator.data.get(AZD_SYSTEMS, {})
|
||||
received_systems = set(systems_data)
|
||||
new_systems = received_systems - added_systems
|
||||
if new_systems:
|
||||
entities.extend(
|
||||
AirzoneSystemSelect(
|
||||
coordinator,
|
||||
description,
|
||||
entry,
|
||||
system_id,
|
||||
systems_data.get(system_id),
|
||||
)
|
||||
for system_id in new_systems
|
||||
for description in SYSTEM_SELECT_TYPES
|
||||
if description.key in systems_data.get(system_id)
|
||||
)
|
||||
added_systems.update(new_systems)
|
||||
|
||||
zones_data = coordinator.data.get(AZD_ZONES, {})
|
||||
received_zones = set(zones_data)
|
||||
new_zones = received_zones - added_zones
|
||||
if new_zones:
|
||||
entities.extend(
|
||||
entities: list[AirzoneZoneSelect] = [
|
||||
AirzoneZoneSelect(
|
||||
coordinator,
|
||||
description,
|
||||
@@ -187,8 +161,8 @@ async def async_setup_entry(
|
||||
for description in MAIN_ZONE_SELECT_TYPES
|
||||
if description.key in zones_data.get(system_zone_id)
|
||||
and zones_data.get(system_zone_id).get(AZD_MASTER) is True
|
||||
)
|
||||
entities.extend(
|
||||
]
|
||||
entities += [
|
||||
AirzoneZoneSelect(
|
||||
coordinator,
|
||||
description,
|
||||
@@ -199,11 +173,10 @@ async def async_setup_entry(
|
||||
for system_zone_id in new_zones
|
||||
for description in ZONE_SELECT_TYPES
|
||||
if description.key in zones_data.get(system_zone_id)
|
||||
)
|
||||
]
|
||||
async_add_entities(entities)
|
||||
added_zones.update(new_zones)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
|
||||
_async_entity_listener()
|
||||
|
||||
@@ -230,38 +203,6 @@ class AirzoneBaseSelect(AirzoneEntity, SelectEntity):
|
||||
self._attr_current_option = self._get_current_option()
|
||||
|
||||
|
||||
class AirzoneSystemSelect(AirzoneSystemEntity, AirzoneBaseSelect):
|
||||
"""Define an Airzone System select."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirzoneUpdateCoordinator,
|
||||
description: AirzoneSelectDescription,
|
||||
entry: ConfigEntry,
|
||||
system_id: str,
|
||||
system_data: dict[str, Any],
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator, entry, system_data)
|
||||
|
||||
self._attr_unique_id = f"{self._attr_unique_id}_{system_id}_{description.key}"
|
||||
self.entity_description = description
|
||||
|
||||
self._attr_options = self.entity_description.options_fn(
|
||||
system_data, description.options_dict
|
||||
)
|
||||
|
||||
self.values_dict = {v: k for k, v in description.options_dict.items()}
|
||||
|
||||
self._async_update_attrs()
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
param = self.entity_description.api_param
|
||||
value = self.entity_description.options_dict[option]
|
||||
await self._async_update_sys_params({param: value})
|
||||
|
||||
|
||||
class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect):
|
||||
"""Define an Airzone Zone select."""
|
||||
|
||||
|
||||
@@ -4,8 +4,6 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import dateutil
|
||||
|
||||
from homeassistant.components.automation import automations_with_entity
|
||||
from homeassistant.components.script import scripts_with_entity
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -181,7 +179,6 @@ SENSORS: dict[str, SensorEntityDescription] = {
|
||||
LAST_S_TEST: SensorEntityDescription(
|
||||
key=LAST_S_TEST,
|
||||
translation_key="last_self_test",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
),
|
||||
"lastxfer": SensorEntityDescription(
|
||||
key="lastxfer",
|
||||
@@ -235,7 +232,6 @@ SENSORS: dict[str, SensorEntityDescription] = {
|
||||
"masterupd": SensorEntityDescription(
|
||||
key="masterupd",
|
||||
translation_key="master_update",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"maxlinev": SensorEntityDescription(
|
||||
@@ -369,7 +365,6 @@ SENSORS: dict[str, SensorEntityDescription] = {
|
||||
"starttime": SensorEntityDescription(
|
||||
key="starttime",
|
||||
translation_key="startup_time",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"statflag": SensorEntityDescription(
|
||||
@@ -421,19 +416,16 @@ SENSORS: dict[str, SensorEntityDescription] = {
|
||||
"xoffbat": SensorEntityDescription(
|
||||
key="xoffbat",
|
||||
translation_key="transfer_from_battery",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"xoffbatt": SensorEntityDescription(
|
||||
key="xoffbatt",
|
||||
translation_key="transfer_from_battery",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
"xonbatt": SensorEntityDescription(
|
||||
key="xonbatt",
|
||||
translation_key="transfer_to_battery",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
}
|
||||
@@ -537,13 +529,7 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
|
||||
self._attr_native_value = None
|
||||
return
|
||||
|
||||
data = self.coordinator.data[key]
|
||||
|
||||
if self.entity_description.device_class == SensorDeviceClass.TIMESTAMP:
|
||||
self._attr_native_value = dateutil.parser.parse(data)
|
||||
return
|
||||
|
||||
self._attr_native_value, inferred_unit = infer_unit(data)
|
||||
self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
|
||||
if not self.native_unit_of_measurement:
|
||||
self._attr_native_unit_of_measurement = inferred_unit
|
||||
|
||||
|
||||
@@ -3,8 +3,9 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
import math
|
||||
|
||||
from pymicro_vad import MicroVad
|
||||
from pysilero_vad import SileroVoiceActivityDetector
|
||||
from pyspeex_noise import AudioProcessor
|
||||
|
||||
from .const import BYTES_PER_CHUNK
|
||||
@@ -42,8 +43,8 @@ class AudioEnhancer(ABC):
|
||||
"""Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""
|
||||
|
||||
|
||||
class MicroVadSpeexEnhancer(AudioEnhancer):
|
||||
"""Audio enhancer that runs microVAD and speex."""
|
||||
class SileroVadSpeexEnhancer(AudioEnhancer):
|
||||
"""Audio enhancer that runs Silero VAD and speex."""
|
||||
|
||||
def __init__(
|
||||
self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
|
||||
@@ -69,21 +70,49 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
|
||||
self.noise_suppression,
|
||||
)
|
||||
|
||||
self.vad: MicroVad | None = None
|
||||
self.vad: SileroVoiceActivityDetector | None = None
|
||||
|
||||
# We get 10ms chunks but Silero works on 32ms chunks, so we have to
|
||||
# buffer audio. The previous speech probability is used until enough
|
||||
# audio has been buffered.
|
||||
self._vad_buffer: bytearray | None = None
|
||||
self._vad_buffer_chunks = 0
|
||||
self._vad_buffer_chunk_idx = 0
|
||||
self._last_speech_probability: float | None = None
|
||||
|
||||
if self.is_vad_enabled:
|
||||
self.vad = MicroVad()
|
||||
_LOGGER.debug("Initialized microVAD")
|
||||
self.vad = SileroVoiceActivityDetector()
|
||||
|
||||
# VAD buffer is a multiple of 10ms, but Silero VAD needs 32ms.
|
||||
self._vad_buffer_chunks = int(
|
||||
math.ceil(self.vad.chunk_bytes() / BYTES_PER_CHUNK)
|
||||
)
|
||||
self._vad_leftover_bytes = self.vad.chunk_bytes() - BYTES_PER_CHUNK
|
||||
self._vad_buffer = bytearray(self.vad.chunk_bytes())
|
||||
_LOGGER.debug("Initialized Silero VAD")
|
||||
|
||||
def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
|
||||
"""Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
|
||||
speech_probability: float | None = None
|
||||
|
||||
assert len(audio) == BYTES_PER_CHUNK
|
||||
|
||||
if self.vad is not None:
|
||||
# Run VAD
|
||||
speech_probability = self.vad.Process10ms(audio)
|
||||
assert self._vad_buffer is not None
|
||||
start_idx = self._vad_buffer_chunk_idx * BYTES_PER_CHUNK
|
||||
self._vad_buffer[start_idx : start_idx + BYTES_PER_CHUNK] = audio
|
||||
|
||||
self._vad_buffer_chunk_idx += 1
|
||||
if self._vad_buffer_chunk_idx >= self._vad_buffer_chunks:
|
||||
# We have enough data to run Silero VAD (32 ms)
|
||||
self._last_speech_probability = self.vad.process_chunk(
|
||||
self._vad_buffer[: self.vad.chunk_bytes()]
|
||||
)
|
||||
|
||||
# Copy leftover audio that wasn't processed to start
|
||||
self._vad_buffer[: self._vad_leftover_bytes] = self._vad_buffer[
|
||||
-self._vad_leftover_bytes :
|
||||
]
|
||||
self._vad_buffer_chunk_idx = 0
|
||||
|
||||
if self.audio_processor is not None:
|
||||
# Run noise suppression and auto gain
|
||||
@@ -92,5 +121,5 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
|
||||
return EnhancedAudioChunk(
|
||||
audio=audio,
|
||||
timestamp_ms=timestamp_ms,
|
||||
speech_probability=speech_probability,
|
||||
speech_probability=self._last_speech_probability,
|
||||
)
|
||||
|
||||
@@ -8,5 +8,5 @@
"integration_type": "system",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"]
"requirements": ["pysilero-vad==3.2.0", "pyspeex-noise==1.0.2"]
}
@@ -55,7 +55,7 @@ from homeassistant.util import (
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.limited_size_dict import LimitedSizeDict

from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, SileroVadSpeexEnhancer
from .const import (
ACKNOWLEDGE_PATH,
BYTES_PER_CHUNK,
@@ -633,7 +633,7 @@ class PipelineRun:
# Initialize with audio settings
if self.audio_settings.needs_processor and (self.audio_enhancer is None):
# Default audio enhancer
self.audio_enhancer = MicroVadSpeexEnhancer(
self.audio_enhancer = SileroVadSpeexEnhancer(
self.audio_settings.auto_gain_dbfs,
self.audio_settings.noise_suppression_level,
self.audio_settings.is_vad_enabled,
@@ -140,7 +140,6 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
"light",
"lock",
"media_player",
"person",
"scene",
"siren",
"switch",
@@ -4,7 +4,6 @@ from __future__ import annotations

import json
import logging
from typing import Any

from azure.servicebus import ServiceBusMessage
from azure.servicebus.aio import ServiceBusClient, ServiceBusSender
@@ -93,7 +92,7 @@ class ServiceBusNotificationService(BaseNotificationService):
"""Initialize the service."""
self._client = client

async def async_send_message(self, message: str, **kwargs: Any) -> None:
async def async_send_message(self, message, **kwargs):
"""Send a message."""
dto = {ATTR_ASB_MESSAGE: message}
@@ -34,12 +34,7 @@ class BeoData:

type BeoConfigEntry = ConfigEntry[BeoData]

PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.EVENT,
Platform.MEDIA_PLAYER,
Platform.SENSOR,
]
PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER]


async def async_setup_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
@@ -1,63 +0,0 @@
|
||||
"""Binary Sensor entities for the Bang & Olufsen integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from mozart_api.models import BatteryState
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BeoConfigEntry
|
||||
from .const import CONNECTION_STATUS, DOMAIN, WebsocketNotification
|
||||
from .entity import BeoEntity
|
||||
from .util import supports_battery
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: BeoConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Binary Sensor entities from config entry."""
|
||||
if await supports_battery(config_entry.runtime_data.client):
|
||||
async_add_entities(new_entities=[BeoBinarySensorBatteryCharging(config_entry)])
|
||||
|
||||
|
||||
class BeoBinarySensorBatteryCharging(BinarySensorEntity, BeoEntity):
|
||||
"""Battery charging Binary Sensor."""
|
||||
|
||||
_attr_device_class = BinarySensorDeviceClass.BATTERY_CHARGING
|
||||
_attr_is_on = False
|
||||
|
||||
def __init__(self, config_entry: BeoConfigEntry) -> None:
|
||||
"""Init the battery charging Binary Sensor."""
|
||||
super().__init__(config_entry, config_entry.runtime_data.client)
|
||||
|
||||
self._attr_unique_id = f"{self._unique_id}_charging"
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Turn on the dispatchers."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BATTERY}",
|
||||
self._update_battery_charging,
|
||||
)
|
||||
)
|
||||
|
||||
async def _update_battery_charging(self, data: BatteryState) -> None:
|
||||
"""Update battery charging."""
|
||||
self._attr_is_on = bool(data.is_charging)
|
||||
self.async_write_ha_state()
|
||||
@@ -115,7 +115,6 @@ class WebsocketNotification(StrEnum):
"""Enum for WebSocket notification types."""

ACTIVE_LISTENING_MODE = "active_listening_mode"
BATTERY = "battery"
BEO_REMOTE_BUTTON = "beo_remote_button"
BUTTON = "button"
PLAYBACK_ERROR = "playback_error"
@@ -4,10 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
|
||||
from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
|
||||
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.const import CONF_MODEL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
@@ -57,19 +55,6 @@ async def async_get_config_entry_diagnostics(
|
||||
|
||||
# Get remotes
|
||||
for remote in await get_remotes(config_entry.runtime_data.client):
|
||||
# Get Battery Sensor states
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
SENSOR_DOMAIN,
|
||||
DOMAIN,
|
||||
f"{remote.serial_number}_{config_entry.unique_id}_remote_battery_level",
|
||||
):
|
||||
if state := hass.states.get(entity_id):
|
||||
state_dict = dict(state.as_dict())
|
||||
|
||||
# Remove context as it is not relevant
|
||||
state_dict.pop("context")
|
||||
data[f"remote_{remote.serial_number}_battery_level"] = state_dict
|
||||
|
||||
# Get key Event entity states (if enabled)
|
||||
for key_type in get_remote_keys():
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
@@ -87,26 +72,4 @@ async def async_get_config_entry_diagnostics(
|
||||
# Add remote Mozart model
|
||||
data[f"remote_{remote.serial_number}"] = dict(remote)
|
||||
|
||||
# Get Mozart battery entity
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
SENSOR_DOMAIN, DOMAIN, f"{config_entry.unique_id}_battery_level"
|
||||
):
|
||||
if state := hass.states.get(entity_id):
|
||||
state_dict = dict(state.as_dict())
|
||||
|
||||
# Remove context as it is not relevant
|
||||
state_dict.pop("context")
|
||||
data["battery_level"] = state_dict
|
||||
|
||||
# Get Mozart battery charging entity
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
BINARY_SENSOR_DOMAIN, DOMAIN, f"{config_entry.unique_id}_charging"
|
||||
):
|
||||
if state := hass.states.get(entity_id):
|
||||
state_dict = dict(state.as_dict())
|
||||
|
||||
# Remove context as it is not relevant
|
||||
state_dict.pop("context")
|
||||
data["charging"] = state_dict
|
||||
|
||||
return data
|
||||
|
||||
@@ -1,139 +0,0 @@
|
||||
"""Sensor entities for the Bang & Olufsen integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
from datetime import timedelta
|
||||
|
||||
from aiohttp import ClientConnectorError
|
||||
from mozart_api.exceptions import ApiException
|
||||
from mozart_api.models import BatteryState, PairedRemote
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BeoConfigEntry
|
||||
from .const import CONNECTION_STATUS, DOMAIN, WebsocketNotification
|
||||
from .entity import BeoEntity
|
||||
from .util import get_remotes, supports_battery
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=15)
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: BeoConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Sensor entities from config entry."""
|
||||
entities: list[BeoSensor] = []
|
||||
|
||||
# Check for Mozart device with battery
|
||||
if await supports_battery(config_entry.runtime_data.client):
|
||||
entities.append(BeoSensorBatteryLevel(config_entry))
|
||||
|
||||
# Add any Beoremote One remotes
|
||||
entities.extend(
|
||||
[
|
||||
BeoSensorRemoteBatteryLevel(config_entry, remote)
|
||||
for remote in (await get_remotes(config_entry.runtime_data.client))
|
||||
]
|
||||
)
|
||||
|
||||
async_add_entities(entities, update_before_add=True)
|
||||
|
||||
|
||||
class BeoSensor(SensorEntity, BeoEntity):
|
||||
"""Base Bang & Olufsen Sensor."""
|
||||
|
||||
def __init__(self, config_entry: BeoConfigEntry) -> None:
|
||||
"""Initialize Sensor."""
|
||||
super().__init__(config_entry, config_entry.runtime_data.client)
|
||||
|
||||
|
||||
class BeoSensorBatteryLevel(BeoSensor):
|
||||
"""Battery level Sensor for Mozart devices."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.BATTERY
|
||||
_attr_native_unit_of_measurement = PERCENTAGE
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
def __init__(self, config_entry: BeoConfigEntry) -> None:
|
||||
"""Init the battery level Sensor."""
|
||||
super().__init__(config_entry)
|
||||
|
||||
self._attr_unique_id = f"{self._unique_id}_battery_level"
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Turn on the dispatchers."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BATTERY}",
|
||||
self._update_battery,
|
||||
)
|
||||
)
|
||||
|
||||
async def _update_battery(self, data: BatteryState) -> None:
|
||||
"""Update sensor value."""
|
||||
self._attr_native_value = data.battery_level
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class BeoSensorRemoteBatteryLevel(BeoSensor):
|
||||
"""Battery level Sensor for the Beoremote One."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.BATTERY
|
||||
_attr_native_unit_of_measurement = PERCENTAGE
|
||||
_attr_should_poll = True
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
def __init__(self, config_entry: BeoConfigEntry, remote: PairedRemote) -> None:
|
||||
"""Init the battery level Sensor."""
|
||||
super().__init__(config_entry)
|
||||
# Serial number is not None, as the remote object is provided by get_remotes
|
||||
assert remote.serial_number
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{remote.serial_number}_{self._unique_id}_remote_battery_level"
|
||||
)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")}
|
||||
)
|
||||
self._attr_native_value = remote.battery_level
|
||||
self._remote = remote
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Turn on the dispatchers."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Poll battery status."""
|
||||
with contextlib.suppress(ApiException, ClientConnectorError, TimeoutError):
|
||||
for remote in await get_remotes(self._client):
|
||||
if remote.serial_number == self._remote.serial_number:
|
||||
self._attr_native_value = remote.battery_level
|
||||
break
|
||||
@@ -84,10 +84,3 @@ def get_remote_keys() -> list[str]:
|
||||
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
|
||||
],
|
||||
]
|
||||
|
||||
|
||||
async def supports_battery(client: MozartClient) -> bool:
|
||||
"""Get if a Mozart device has a battery."""
|
||||
battery_state = await client.get_battery_state()
|
||||
|
||||
return battery_state.state != "BatteryNotPresent"
|
||||
|
||||
@@ -6,7 +6,6 @@ import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from mozart_api.models import (
|
||||
BatteryState,
|
||||
BeoRemoteButton,
|
||||
ButtonEvent,
|
||||
ListeningModeProps,
|
||||
@@ -61,7 +60,6 @@ class BeoWebsocket(BeoBase):
|
||||
self._client.get_active_listening_mode_notifications(
|
||||
self.on_active_listening_mode
|
||||
)
|
||||
self._client.get_battery_notifications(self.on_battery_notification)
|
||||
self._client.get_beo_remote_button_notifications(
|
||||
self.on_beo_remote_button_notification
|
||||
)
|
||||
@@ -117,14 +115,6 @@ class BeoWebsocket(BeoBase):
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_battery_notification(self, notification: BatteryState) -> None:
|
||||
"""Send battery dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BATTERY}",
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_beo_remote_button_notification(self, notification: BeoRemoteButton) -> None:
|
||||
"""Send beo_remote_button dispatch."""
|
||||
if TYPE_CHECKING:
|
||||
|
||||
@@ -22,7 +22,7 @@ from homeassistant.components.media_player import MediaType
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_CLIENT_ID, CONF_PIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
@@ -56,31 +56,8 @@ def catch_braviatv_errors[_BraviaTVCoordinatorT: BraviaTVCoordinator, **_P](
|
||||
"""Catch Bravia errors and log message."""
|
||||
try:
|
||||
await func(self, *args, **kwargs)
|
||||
except BraviaNotFound as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_error_not_found",
|
||||
translation_placeholders={
|
||||
"device": self.config_entry.title,
|
||||
},
|
||||
) from err
|
||||
except (BraviaConnectionError, BraviaConnectionTimeout, BraviaTurnedOff) as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_error_offline",
|
||||
translation_placeholders={
|
||||
"device": self.config_entry.title,
|
||||
},
|
||||
) from err
|
||||
except BraviaError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_error",
|
||||
translation_placeholders={
|
||||
"device": self.config_entry.title,
|
||||
"error": repr(err),
|
||||
},
|
||||
) from err
|
||||
_LOGGER.error("Command error: %s", err)
|
||||
await self.async_request_refresh()
|
||||
|
||||
return wrapper
|
||||
@@ -188,35 +165,17 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
|
||||
if self.skipped_updates < 10:
|
||||
self.connected = False
|
||||
self.skipped_updates += 1
|
||||
_LOGGER.debug(
|
||||
"Update for %s skipped: the Bravia API service is reloading",
|
||||
self.config_entry.title,
|
||||
)
|
||||
_LOGGER.debug("Update skipped, Bravia API service is reloading")
|
||||
return
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_error_not_found",
|
||||
translation_placeholders={
|
||||
"device": self.config_entry.title,
|
||||
},
|
||||
) from err
|
||||
raise UpdateFailed("Error communicating with device") from err
|
||||
except (BraviaConnectionError, BraviaConnectionTimeout, BraviaTurnedOff):
|
||||
self.is_on = False
|
||||
self.connected = False
|
||||
_LOGGER.debug(
|
||||
"Update for %s skipped: the TV is turned off", self.config_entry.title
|
||||
)
|
||||
_LOGGER.debug("Update skipped, Bravia TV is off")
|
||||
except BraviaError as err:
|
||||
self.is_on = False
|
||||
self.connected = False
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_error",
|
||||
translation_placeholders={
|
||||
"device": self.config_entry.title,
|
||||
"error": repr(err),
|
||||
},
|
||||
) from err
|
||||
raise UpdateFailed("Error communicating with device") from err
|
||||
|
||||
async def async_update_volume(self) -> None:
|
||||
"""Update volume information."""
|
||||
|
||||
@@ -55,22 +55,5 @@
|
||||
"name": "Terminate apps"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"command_error": {
|
||||
"message": "Error sending command to {device}: {error}"
|
||||
},
|
||||
"command_error_not_found": {
|
||||
"message": "Error sending command to {device}: the Bravia API service is reloading"
|
||||
},
|
||||
"command_error_offline": {
|
||||
"message": "Error sending command to {device}: the TV is turned off"
|
||||
},
|
||||
"update_error": {
|
||||
"message": "Error updating data for {device}: {error}"
|
||||
},
|
||||
"update_error_not_found": {
|
||||
"message": "Error updating data for {device}: the Bravia API service is stuck"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""The BSB-Lan integration."""
|
||||
|
||||
import asyncio
|
||||
import dataclasses
|
||||
|
||||
from bsblan import (
|
||||
@@ -78,16 +77,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
|
||||
bsblan = BSBLAN(config, session)
|
||||
|
||||
try:
|
||||
# Initialize the client first - this sets up internal caches and validates
|
||||
# the connection by fetching firmware version
|
||||
# Initialize the client first - this sets up internal caches and validates the connection
|
||||
await bsblan.initialize()
|
||||
|
||||
# Fetch device metadata in parallel for faster startup
|
||||
device, info, static = await asyncio.gather(
|
||||
bsblan.device(),
|
||||
bsblan.info(),
|
||||
bsblan.static_values(),
|
||||
)
|
||||
# Fetch all required device metadata
|
||||
device = await bsblan.device()
|
||||
info = await bsblan.info()
|
||||
static = await bsblan.static_values()
|
||||
except BSBLANConnectionError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
@@ -115,10 +110,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
|
||||
fast_coordinator = BSBLanFastCoordinator(hass, entry, bsblan)
|
||||
slow_coordinator = BSBLanSlowCoordinator(hass, entry, bsblan)
|
||||
|
||||
# Perform first refresh of fast coordinator (required for entities)
|
||||
# Perform first refresh of both coordinators
|
||||
await fast_coordinator.async_config_entry_first_refresh()
|
||||
|
||||
# Refresh slow coordinator - don't fail if DHW is not available
|
||||
# Try to refresh slow coordinator, but don't fail if DHW is not available
|
||||
# This allows the integration to work even if the device doesn't support DHW
|
||||
await slow_coordinator.async_refresh()
|
||||
|
||||
|
||||
@@ -111,17 +111,11 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
|
||||
return None
|
||||
return self.coordinator.data.state.target_temperature.value
|
||||
|
||||
@property
|
||||
def _hvac_mode_value(self) -> int | str | None:
|
||||
"""Return the raw hvac_mode value from the coordinator."""
|
||||
if (hvac_mode := self.coordinator.data.state.hvac_mode) is None:
|
||||
return None
|
||||
return hvac_mode.value
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode | None:
|
||||
"""Return hvac operation ie. heat, cool mode."""
|
||||
if (hvac_mode_value := self._hvac_mode_value) is None:
|
||||
hvac_mode_value = self.coordinator.data.state.hvac_mode.value
|
||||
if hvac_mode_value is None:
|
||||
return None
|
||||
# BSB-Lan returns integer values: 0=off, 1=auto, 2=eco, 3=heat
|
||||
if isinstance(hvac_mode_value, int):
|
||||
@@ -131,8 +125,9 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
|
||||
@property
|
||||
def preset_mode(self) -> str | None:
|
||||
"""Return the current preset mode."""
|
||||
hvac_mode_value = self.coordinator.data.state.hvac_mode.value
|
||||
# BSB-Lan mode 2 is eco/reduced mode
|
||||
if self._hvac_mode_value == 2:
|
||||
if hvac_mode_value == 2:
|
||||
return PRESET_ECO
|
||||
return PRESET_NONE
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from random import randint
|
||||
|
||||
from bsblan import (
|
||||
BSBLAN,
|
||||
@@ -22,17 +23,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .const import DOMAIN, LOGGER, SCAN_INTERVAL_FAST, SCAN_INTERVAL_SLOW
|
||||
|
||||
# Filter lists for optimized API calls - only fetch parameters we actually use
|
||||
# This significantly reduces response time (~0.2s per parameter saved)
|
||||
STATE_INCLUDE = ["current_temperature", "target_temperature", "hvac_mode"]
|
||||
SENSOR_INCLUDE = ["current_temperature", "outside_temperature"]
|
||||
DHW_STATE_INCLUDE = [
|
||||
"operating_mode",
|
||||
"nominal_setpoint",
|
||||
"dhw_actual_value_top_temperature",
|
||||
]
|
||||
DHW_CONFIG_INCLUDE = ["reduced_setpoint", "nominal_setpoint_max"]
|
||||
|
||||
|
||||
@dataclass
|
||||
class BSBLanFastData:
|
||||
@@ -90,18 +80,26 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
|
||||
config_entry,
|
||||
client,
|
||||
name=f"{DOMAIN}_fast_{config_entry.data[CONF_HOST]}",
|
||||
update_interval=SCAN_INTERVAL_FAST,
|
||||
update_interval=self._get_update_interval(),
|
||||
)
|
||||
|
||||
def _get_update_interval(self) -> timedelta:
|
||||
"""Get the update interval with a random offset.
|
||||
|
||||
Add a random number of seconds to avoid timeouts when
|
||||
the BSB-Lan device is already/still busy retrieving data,
|
||||
e.g. for MQTT or internal logging.
|
||||
"""
|
||||
return SCAN_INTERVAL_FAST + timedelta(seconds=randint(1, 8))
|
||||
|
||||
async def _async_update_data(self) -> BSBLanFastData:
|
||||
"""Fetch fast-changing data from the BSB-Lan device."""
|
||||
try:
|
||||
# Client is already initialized in async_setup_entry
|
||||
# Use include filtering to only fetch parameters we actually use
|
||||
# This reduces response time significantly (~0.2s per parameter)
|
||||
state = await self.client.state(include=STATE_INCLUDE)
|
||||
sensor = await self.client.sensor(include=SENSOR_INCLUDE)
|
||||
dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
|
||||
# Fetch fast-changing data (state, sensor, DHW state)
|
||||
state = await self.client.state()
|
||||
sensor = await self.client.sensor()
|
||||
dhw = await self.client.hot_water_state()
|
||||
|
||||
except BSBLANAuthError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
@@ -113,6 +111,9 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
|
||||
f"Error while establishing connection with BSB-Lan device at {host}"
|
||||
) from err
|
||||
|
||||
# Update the interval with random jitter for next update
|
||||
self.update_interval = self._get_update_interval()
|
||||
|
||||
return BSBLanFastData(
|
||||
state=state,
|
||||
sensor=sensor,
|
||||
@@ -142,8 +143,8 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
|
||||
"""Fetch slow-changing data from the BSB-Lan device."""
|
||||
try:
|
||||
# Client is already initialized in async_setup_entry
|
||||
# Use include filtering to only fetch parameters we actually use
|
||||
dhw_config = await self.client.hot_water_config(include=DHW_CONFIG_INCLUDE)
|
||||
# Fetch slow-changing configuration data
|
||||
dhw_config = await self.client.hot_water_config()
|
||||
dhw_schedule = await self.client.hot_water_schedule()
|
||||
|
||||
except AttributeError:
|
||||
|
||||
@@ -29,11 +29,7 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
|
||||
connections={(CONNECTION_NETWORK_MAC, format_mac(mac))},
|
||||
name=data.device.name,
|
||||
manufacturer="BSBLAN Inc.",
|
||||
model=(
|
||||
data.info.device_identification.value
|
||||
if data.info.device_identification
|
||||
else None
|
||||
),
|
||||
model=data.info.device_identification.value,
|
||||
sw_version=data.device.version,
|
||||
configuration_url=f"http://{host}",
|
||||
)
|
||||
|
||||
@@ -2,9 +2,6 @@
|
||||
"services": {
|
||||
"set_hot_water_schedule": {
|
||||
"service": "mdi:calendar-clock"
|
||||
},
|
||||
"sync_time": {
|
||||
"service": "mdi:timer-sync-outline"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"requirements": ["python-bsblan==4.1.0"],
|
||||
"requirements": ["python-bsblan==3.1.4"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "bsb-lan*",
|
||||
|
||||
@@ -13,7 +13,6 @@ from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
@@ -31,9 +30,8 @@ ATTR_FRIDAY_SLOTS = "friday_slots"
|
||||
ATTR_SATURDAY_SLOTS = "saturday_slots"
|
||||
ATTR_SUNDAY_SLOTS = "sunday_slots"
|
||||
|
||||
# Service names
|
||||
# Service name
|
||||
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
|
||||
SERVICE_SYNC_TIME = "sync_time"
|
||||
|
||||
|
||||
# Schema for a single time slot
|
||||
@@ -205,74 +203,6 @@ async def set_hot_water_schedule(service_call: ServiceCall) -> None:
|
||||
await entry.runtime_data.slow_coordinator.async_request_refresh()
|
||||
|
||||
|
||||
async def async_sync_time(service_call: ServiceCall) -> None:
|
||||
"""Synchronize BSB-LAN device time with Home Assistant."""
|
||||
device_id: str = service_call.data[ATTR_DEVICE_ID]
|
||||
|
||||
# Get the device and config entry
|
||||
device_registry = dr.async_get(service_call.hass)
|
||||
device_entry = device_registry.async_get(device_id)
|
||||
|
||||
if device_entry is None:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_device_id",
|
||||
translation_placeholders={"device_id": device_id},
|
||||
)
|
||||
|
||||
# Find the config entry for this device
|
||||
matching_entries: list[BSBLanConfigEntry] = [
|
||||
entry
|
||||
for entry in service_call.hass.config_entries.async_entries(DOMAIN)
|
||||
if entry.entry_id in device_entry.config_entries
|
||||
]
|
||||
|
||||
if not matching_entries:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="no_config_entry_for_device",
|
||||
translation_placeholders={"device_id": device_entry.name or device_id},
|
||||
)
|
||||
|
||||
entry = matching_entries[0]
|
||||
|
||||
# Verify the config entry is loaded
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="config_entry_not_loaded",
|
||||
translation_placeholders={"device_name": device_entry.name or device_id},
|
||||
)
|
||||
|
||||
client = entry.runtime_data.client
|
||||
|
||||
try:
|
||||
# Get current device time
|
||||
device_time = await client.time()
|
||||
current_time = dt_util.now()
|
||||
current_time_str = current_time.strftime("%d.%m.%Y %H:%M:%S")
|
||||
|
||||
# Only sync if device time differs from HA time
|
||||
if device_time.time.value != current_time_str:
|
||||
await client.set_time(current_time_str)
|
||||
except BSBLANError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="sync_time_failed",
|
||||
translation_placeholders={
|
||||
"device_name": device_entry.name or device_id,
|
||||
"error": str(err),
|
||||
},
|
||||
) from err
|
||||
|
||||
|
||||
SYNC_TIME_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_DEVICE_ID): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Register the BSB-Lan services."""
|
||||
@@ -282,10 +212,3 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
set_hot_water_schedule,
|
||||
schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SYNC_TIME,
|
||||
async_sync_time,
|
||||
schema=SYNC_TIME_SCHEMA,
|
||||
)
|
||||
|
||||
@@ -1,12 +1,3 @@
|
||||
sync_time:
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
example: "abc123device456"
|
||||
selector:
|
||||
device:
|
||||
integration: bsblan
|
||||
|
||||
set_hot_water_schedule:
|
||||
fields:
|
||||
device_id:
|
||||
|
||||
@@ -79,6 +79,9 @@
|
||||
"invalid_device_id": {
|
||||
"message": "Invalid device ID: {device_id}"
|
||||
},
|
||||
"invalid_time_format": {
|
||||
"message": "Invalid time format provided"
|
||||
},
|
||||
"no_config_entry_for_device": {
|
||||
"message": "No configuration entry found for device: {device_id}"
|
||||
},
|
||||
@@ -105,9 +108,6 @@
|
||||
},
|
||||
"setup_general_error": {
|
||||
"message": "An unknown error occurred while retrieving static device data"
|
||||
},
|
||||
"sync_time_failed": {
|
||||
"message": "Failed to sync time for {device_name}: {error}"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -148,16 +148,6 @@
|
||||
}
|
||||
},
|
||||
"name": "Set hot water schedule"
|
||||
},
|
||||
"sync_time": {
|
||||
"description": "Synchronize Home Assistant time to the BSB-Lan device. Only updates if device time differs from Home Assistant time.",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"description": "The BSB-LAN device to sync time for.",
|
||||
"name": "Device"
|
||||
}
|
||||
},
|
||||
"name": "Sync time"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,13 +15,5 @@
|
||||
"get_events": {
|
||||
"service": "mdi:calendar-month"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"event_ended": {
|
||||
"trigger": "mdi:calendar-end"
|
||||
},
|
||||
"event_started": {
|
||||
"trigger": "mdi:calendar-start"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -45,14 +45,6 @@
|
||||
"title": "Detected use of deprecated action calendar.list_events"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_offset_type": {
|
||||
"options": {
|
||||
"after": "After",
|
||||
"before": "Before"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"create_event": {
|
||||
"description": "Adds a new calendar event.",
|
||||
@@ -111,35 +103,5 @@
|
||||
"name": "Get events"
|
||||
}
|
||||
},
|
||||
"title": "Calendar",
|
||||
"triggers": {
|
||||
"event_ended": {
|
||||
"description": "Triggers when a calendar event ends.",
|
||||
"fields": {
|
||||
"offset": {
|
||||
"description": "Offset from the end of the event.",
|
||||
"name": "Offset"
|
||||
},
|
||||
"offset_type": {
|
||||
"description": "Whether to trigger before or after the end of the event, if an offset is defined.",
|
||||
"name": "Offset type"
|
||||
}
|
||||
},
|
||||
"name": "Calendar event ended"
|
||||
},
|
||||
"event_started": {
|
||||
"description": "Triggers when a calendar event starts.",
|
||||
"fields": {
|
||||
"offset": {
|
||||
"description": "Offset from the start of the event.",
|
||||
"name": "Offset"
|
||||
},
|
||||
"offset_type": {
|
||||
"description": "Whether to trigger before or after the start of the event, if an offset is defined.",
|
||||
"name": "Offset type"
|
||||
}
|
||||
},
|
||||
"name": "Calendar event started"
|
||||
}
|
||||
}
|
||||
"title": "Calendar"
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
import datetime
|
||||
@@ -11,15 +10,8 @@ from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
CONF_ENTITY_ID,
|
||||
CONF_EVENT,
|
||||
CONF_OFFSET,
|
||||
CONF_OPTIONS,
|
||||
CONF_TARGET,
|
||||
)
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
|
||||
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_OPTIONS
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
|
||||
@@ -28,13 +20,12 @@ from homeassistant.helpers.event import (
|
||||
async_track_point_in_time,
|
||||
async_track_time_interval,
|
||||
)
|
||||
from homeassistant.helpers.target import TargetEntityChangeTracker, TargetSelection
|
||||
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import CalendarEntity, CalendarEvent
|
||||
from .const import DATA_COMPONENT, DOMAIN
|
||||
from .const import DATA_COMPONENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -42,35 +33,19 @@ EVENT_START = "start"
|
||||
EVENT_END = "end"
|
||||
UPDATE_INTERVAL = datetime.timedelta(minutes=15)
|
||||
|
||||
CONF_OFFSET_TYPE = "offset_type"
|
||||
OFFSET_TYPE_BEFORE = "before"
|
||||
OFFSET_TYPE_AFTER = "after"
|
||||
|
||||
|
||||
_SINGLE_ENTITY_EVENT_OPTIONS_SCHEMA = {
|
||||
_OPTIONS_SCHEMA_DICT = {
|
||||
vol.Required(CONF_ENTITY_ID): cv.entity_id,
|
||||
vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
|
||||
vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
|
||||
}
|
||||
|
||||
_SINGLE_ENTITY_EVENT_TRIGGER_SCHEMA = vol.Schema(
|
||||
_CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): _SINGLE_ENTITY_EVENT_OPTIONS_SCHEMA,
|
||||
vol.Required(CONF_OPTIONS): _OPTIONS_SCHEMA_DICT,
|
||||
},
|
||||
)
|
||||
|
||||
_EVENT_TRIGGER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS, default={}): {
|
||||
vol.Required(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
|
||||
vol.Required(CONF_OFFSET_TYPE, default=OFFSET_TYPE_BEFORE): vol.In(
|
||||
{OFFSET_TYPE_BEFORE, OFFSET_TYPE_AFTER}
|
||||
),
|
||||
},
|
||||
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
|
||||
}
|
||||
)
|
||||
|
||||
# mypy: disallow-any-generics
|
||||
|
||||
|
||||
@@ -80,7 +55,6 @@ class QueuedCalendarEvent:
|
||||
|
||||
trigger_time: datetime.datetime
|
||||
event: CalendarEvent
|
||||
entity_id: str
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -120,7 +94,7 @@ class Timespan:
|
||||
return f"[{self.start}, {self.end})"
|
||||
|
||||
|
||||
type EventFetcher = Callable[[Timespan], Awaitable[list[tuple[str, CalendarEvent]]]]
|
||||
type EventFetcher = Callable[[Timespan], Awaitable[list[CalendarEvent]]]
|
||||
type QueuedEventFetcher = Callable[[Timespan], Awaitable[list[QueuedCalendarEvent]]]
|
||||
|
||||
|
||||
@@ -136,24 +110,15 @@ def get_entity(hass: HomeAssistant, entity_id: str) -> CalendarEntity:
|
||||
return entity
|
||||
|
||||
|
||||
def event_fetcher(hass: HomeAssistant, entity_ids: set[str]) -> EventFetcher:
|
||||
def event_fetcher(hass: HomeAssistant, entity_id: str) -> EventFetcher:
|
||||
"""Build an async_get_events wrapper to fetch events during a time span."""
|
||||
|
||||
async def async_get_events(timespan: Timespan) -> list[tuple[str, CalendarEvent]]:
|
||||
async def async_get_events(timespan: Timespan) -> list[CalendarEvent]:
|
||||
"""Return events active in the specified time span."""
|
||||
entity = get_entity(hass, entity_id)
|
||||
# Expand by one second to make the end time exclusive
|
||||
end_time = timespan.end + datetime.timedelta(seconds=1)
|
||||
|
||||
events: list[tuple[str, CalendarEvent]] = []
|
||||
for entity_id in entity_ids:
|
||||
entity = get_entity(hass, entity_id)
|
||||
events.extend(
|
||||
(entity_id, event)
|
||||
for event in await entity.async_get_events(
|
||||
hass, timespan.start, end_time
|
||||
)
|
||||
)
|
||||
return events
|
||||
return await entity.async_get_events(hass, timespan.start, end_time)
|
||||
|
||||
return async_get_events
|
||||
|
||||
@@ -177,11 +142,12 @@ def queued_event_fetcher(
|
||||
# Example: For an EVENT_END trigger the event may start during this
|
||||
# time span, but need to be triggered later when the end happens.
|
||||
results = []
|
||||
for entity_id, event in active_events:
|
||||
trigger_time = get_trigger_time(event)
|
||||
for trigger_time, event in zip(
|
||||
map(get_trigger_time, active_events), active_events, strict=False
|
||||
):
|
||||
if trigger_time not in offset_timespan:
|
||||
continue
|
||||
results.append(QueuedCalendarEvent(trigger_time + offset, event, entity_id))
|
||||
results.append(QueuedCalendarEvent(trigger_time + offset, event))
|
||||
|
||||
_LOGGER.debug(
|
||||
"Scan events @ %s%s found %s eligible of %s active",
|
||||
@@ -274,7 +240,6 @@ class CalendarEventListener:
|
||||
_LOGGER.debug("Dispatching event: %s", queued_event.event)
|
||||
payload = {
|
||||
**self._trigger_payload,
|
||||
ATTR_ENTITY_ID: queued_event.entity_id,
|
||||
"calendar_event": queued_event.event.as_dict(),
|
||||
}
|
||||
self._action_runner(payload, "calendar event state change")
|
||||
@@ -295,77 +260,8 @@ class CalendarEventListener:
|
||||
self._listen_next_calendar_event()
|
||||
|
||||
|
||||
class TargetCalendarEventListener(TargetEntityChangeTracker):
|
||||
"""Helper class to listen to calendar events for target entity changes."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
target_selection: TargetSelection,
|
||||
event_type: str,
|
||||
offset: datetime.timedelta,
|
||||
run_action: TriggerActionRunner,
|
||||
) -> None:
|
||||
"""Initialize the state change tracker."""
|
||||
|
||||
def entity_filter(entities: set[str]) -> set[str]:
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
|
||||
super().__init__(hass, target_selection, entity_filter)
|
||||
self._event_type = event_type
|
||||
self._offset = offset
|
||||
self._run_action = run_action
|
||||
self._trigger_data = {
|
||||
"event": event_type,
|
||||
"offset": offset,
|
||||
}
|
||||
|
||||
self._pending_listener_task: asyncio.Task[None] | None = None
|
||||
self._calendar_event_listener: CalendarEventListener | None = None
|
||||
|
||||
@callback
|
||||
def _handle_entities_update(self, tracked_entities: set[str]) -> None:
|
||||
"""Restart the listeners when the list of entities of the tracked targets is updated."""
|
||||
if self._pending_listener_task:
|
||||
self._pending_listener_task.cancel()
|
||||
self._pending_listener_task = self._hass.async_create_task(
|
||||
self._start_listening(tracked_entities)
|
||||
)
|
||||
|
||||
async def _start_listening(self, tracked_entities: set[str]) -> None:
|
||||
"""Start listening for calendar events."""
|
||||
_LOGGER.debug("Tracking events for calendars: %s", tracked_entities)
|
||||
if self._calendar_event_listener:
|
||||
self._calendar_event_listener.async_detach()
|
||||
self._calendar_event_listener = CalendarEventListener(
|
||||
self._hass,
|
||||
self._run_action,
|
||||
self._trigger_data,
|
||||
queued_event_fetcher(
|
||||
event_fetcher(self._hass, tracked_entities),
|
||||
self._event_type,
|
||||
self._offset,
|
||||
),
|
||||
)
|
||||
await self._calendar_event_listener.async_attach()
|
||||
|
||||
def _unsubscribe(self) -> None:
|
||||
"""Unsubscribe from all events."""
|
||||
super()._unsubscribe()
|
||||
if self._pending_listener_task:
|
||||
self._pending_listener_task.cancel()
|
||||
self._pending_listener_task = None
|
||||
if self._calendar_event_listener:
|
||||
self._calendar_event_listener.async_detach()
|
||||
self._calendar_event_listener = None
|
||||
|
||||
|
||||
class SingleEntityEventTrigger(Trigger):
|
||||
"""Legacy single calendar entity event trigger."""
|
||||
class EventTrigger(Trigger):
|
||||
"""Calendar event trigger."""
|
||||
|
||||
_options: dict[str, Any]
|
||||
|
||||
@@ -375,7 +271,7 @@ class SingleEntityEventTrigger(Trigger):
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
complete_config = move_top_level_schema_fields_to_options(
|
||||
complete_config, _SINGLE_ENTITY_EVENT_OPTIONS_SCHEMA
|
||||
complete_config, _OPTIONS_SCHEMA_DICT
|
||||
)
|
||||
return await super().async_validate_complete_config(hass, complete_config)
|
||||
|
||||
@@ -384,7 +280,7 @@ class SingleEntityEventTrigger(Trigger):
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, _SINGLE_ENTITY_EVENT_TRIGGER_SCHEMA(config))
|
||||
return cast(ConfigType, _CONFIG_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize trigger."""
|
||||
@@ -415,72 +311,15 @@ class SingleEntityEventTrigger(Trigger):
|
||||
run_action,
|
||||
trigger_data,
|
||||
queued_event_fetcher(
|
||||
event_fetcher(self._hass, {entity_id}), event_type, offset
|
||||
event_fetcher(self._hass, entity_id), event_type, offset
|
||||
),
|
||||
)
|
||||
await listener.async_attach()
|
||||
return listener.async_detach
|
||||
|
||||
|
||||
class EventTrigger(Trigger):
|
||||
"""Calendar event trigger."""
|
||||
|
||||
_options: dict[str, Any]
|
||||
_event_type: str
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, _EVENT_TRIGGER_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize trigger."""
|
||||
super().__init__(hass, config)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert config.target is not None
|
||||
assert config.options is not None
|
||||
self._target = config.target
|
||||
self._options = config.options
|
||||
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach a trigger."""
|
||||
|
||||
offset = self._options[CONF_OFFSET]
|
||||
offset_type = self._options[CONF_OFFSET_TYPE]
|
||||
|
||||
if offset_type == OFFSET_TYPE_BEFORE:
|
||||
offset = -offset
|
||||
|
||||
target_selection = TargetSelection(self._target)
|
||||
if not target_selection.has_any_target:
|
||||
raise HomeAssistantError(f"No target defined in {self._target}")
|
||||
listener = TargetCalendarEventListener(
|
||||
self._hass, target_selection, self._event_type, offset, run_action
|
||||
)
|
||||
return listener.async_setup()
|
||||
|
||||
|
||||
class EventStartedTrigger(EventTrigger):
|
||||
"""Calendar event started trigger."""
|
||||
|
||||
_event_type = EVENT_START
|
||||
|
||||
|
||||
class EventEndedTrigger(EventTrigger):
|
||||
"""Calendar event ended trigger."""
|
||||
|
||||
_event_type = EVENT_END
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"_": SingleEntityEventTrigger,
|
||||
"event_started": EventStartedTrigger,
|
||||
"event_ended": EventEndedTrigger,
|
||||
"_": EventTrigger,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: calendar
|
||||
fields:
|
||||
offset:
|
||||
required: true
|
||||
default:
|
||||
days: 0
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 0
|
||||
selector:
|
||||
duration:
|
||||
enable_day: true
|
||||
offset_type:
|
||||
required: true
|
||||
default: before
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_offset_type
|
||||
options:
|
||||
- before
|
||||
- after
|
||||
|
||||
event_started: *trigger_common
|
||||
event_ended: *trigger_common
|
||||
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
from webexpythonsdk import ApiError, WebexAPI, exceptions
|
||||
@@ -52,7 +51,7 @@ class CiscoWebexNotificationService(BaseNotificationService):
|
||||
self.room = room
|
||||
self.client = client
|
||||
|
||||
def send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
def send_message(self, message="", **kwargs):
|
||||
"""Send a message to a user."""
|
||||
|
||||
title = ""
|
||||
|
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
||||
from http import HTTPStatus
|
||||
import json
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import requests
|
||||
import voluptuous as vol
|
||||
@@ -82,7 +81,7 @@ class ClicksendNotificationService(BaseNotificationService):
|
||||
self.language = config[CONF_LANGUAGE]
|
||||
self.voice = config[CONF_VOICE]
|
||||
|
||||
def send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
def send_message(self, message="", **kwargs):
|
||||
"""Send a voice call to a user."""
|
||||
data = {
|
||||
"messages": [
|
||||
|
||||
@@ -50,6 +50,7 @@ from . import (
|
||||
from .client import CloudClient
|
||||
from .const import (
|
||||
CONF_ACCOUNT_LINK_SERVER,
|
||||
CONF_ACCOUNTS_SERVER,
|
||||
CONF_ACME_SERVER,
|
||||
CONF_ALEXA,
|
||||
CONF_ALIASES,
|
||||
@@ -137,6 +138,7 @@ _BASE_CONFIG_SCHEMA = vol.Schema(
|
||||
vol.Optional(CONF_ALEXA): ALEXA_SCHEMA,
|
||||
vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA,
|
||||
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
|
||||
vol.Optional(CONF_ACCOUNTS_SERVER): str,
|
||||
vol.Optional(CONF_ACME_SERVER): str,
|
||||
vol.Optional(CONF_API_SERVER): str,
|
||||
vol.Optional(CONF_RELAYER_SERVER): str,
|
||||
|
||||
@@ -76,6 +76,7 @@ CONF_GOOGLE_ACTIONS = "google_actions"
|
||||
CONF_USER_POOL_ID = "user_pool_id"
|
||||
|
||||
CONF_ACCOUNT_LINK_SERVER = "account_link_server"
|
||||
CONF_ACCOUNTS_SERVER = "accounts_server"
|
||||
CONF_ACME_SERVER = "acme_server"
|
||||
CONF_API_SERVER = "api_server"
|
||||
CONF_DISCOVERY_SERVICE_ACTIONS = "discovery_service_actions"
|
||||
|
||||
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.9.0"],
|
||||
"requirements": ["hass-nabucasa==1.7.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -11,11 +11,13 @@ from homeassistant import config_entries
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.websocket_api import ERR_NOT_FOUND, require_admin
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers.entity_component import async_get_entity_suggested_object_id
|
||||
from homeassistant.helpers.json import json_dumps
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -349,12 +351,26 @@ def websocket_get_automatic_entity_ids(
|
||||
if not (entry := registry.entities.get(entity_id)):
|
||||
automatic_entity_ids[entity_id] = None
|
||||
continue
|
||||
new_entity_id = registry.async_regenerate_entity_id(
|
||||
entry,
|
||||
try:
|
||||
suggested = async_get_entity_suggested_object_id(hass, entity_id)
|
||||
except HomeAssistantError as err:
|
||||
# This is raised if the entity has no object.
|
||||
_LOGGER.debug(
|
||||
"Unable to get suggested object ID for %s, entity ID: %s (%s)",
|
||||
entry.entity_id,
|
||||
entity_id,
|
||||
err,
|
||||
)
|
||||
automatic_entity_ids[entity_id] = None
|
||||
continue
|
||||
suggested_entity_id = registry.async_generate_entity_id(
|
||||
entry.domain,
|
||||
suggested or f"{entry.platform}_{entry.unique_id}",
|
||||
current_entity_id=entity_id,
|
||||
reserved_entity_ids=reserved_entity_ids,
|
||||
)
|
||||
automatic_entity_ids[entity_id] = new_entity_id
|
||||
reserved_entity_ids.add(new_entity_id)
|
||||
automatic_entity_ids[entity_id] = suggested_entity_id
|
||||
reserved_entity_ids.add(suggested_entity_id)
|
||||
|
||||
connection.send_message(
|
||||
websocket_api.result_message(msg["id"], automatic_entity_ids)
|
||||
|
||||
@@ -7,5 +7,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["datadog"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["datadog==0.52.0"]
|
||||
}
|
||||
|
||||
@@ -169,7 +169,6 @@ FRIENDS_OF_HUE_SWITCH = {
|
||||
}
|
||||
|
||||
RODRET_REMOTE_MODEL = "RODRET Dimmer"
|
||||
RODRET_REMOTE_MODEL_2 = "RODRET wireless dimmer"
|
||||
RODRET_REMOTE = {
|
||||
(CONF_SHORT_RELEASE, CONF_TURN_ON): {CONF_EVENT: 1002},
|
||||
(CONF_LONG_PRESS, CONF_TURN_ON): {CONF_EVENT: 1001},
|
||||
@@ -625,7 +624,6 @@ REMOTES = {
|
||||
HUE_WALL_REMOTE_MODEL: HUE_WALL_REMOTE,
|
||||
FRIENDS_OF_HUE_SWITCH_MODEL: FRIENDS_OF_HUE_SWITCH,
|
||||
RODRET_REMOTE_MODEL: RODRET_REMOTE,
|
||||
RODRET_REMOTE_MODEL_2: RODRET_REMOTE,
|
||||
SOMRIG_REMOTE_MODEL: SOMRIG_REMOTE,
|
||||
STYRBAR_REMOTE_MODEL: STYRBAR_REMOTE,
|
||||
SYMFONISK_SOUND_CONTROLLER_MODEL: SYMFONISK_SOUND_CONTROLLER,
|
||||
|
||||
@@ -28,11 +28,10 @@ async def async_setup_entry(
|
||||
DemoHumidifier(
|
||||
name="Humidifier",
|
||||
mode=None,
|
||||
target_humidity=65,
|
||||
target_humidity=68,
|
||||
current_humidity=45,
|
||||
action=HumidifierAction.HUMIDIFYING,
|
||||
device_class=HumidifierDeviceClass.HUMIDIFIER,
|
||||
target_humidity_step=5,
|
||||
),
|
||||
DemoHumidifier(
|
||||
name="Dehumidifier",
|
||||
@@ -67,7 +66,6 @@ class DemoHumidifier(HumidifierEntity):
|
||||
is_on: bool = True,
|
||||
action: HumidifierAction | None = None,
|
||||
device_class: HumidifierDeviceClass | None = None,
|
||||
target_humidity_step: float | None = None,
|
||||
) -> None:
|
||||
"""Initialize the humidifier device."""
|
||||
self._attr_name = name
|
||||
@@ -81,7 +79,6 @@ class DemoHumidifier(HumidifierEntity):
|
||||
self._attr_mode = mode
|
||||
self._attr_available_modes = available_modes
|
||||
self._attr_device_class = device_class
|
||||
self._attr_target_humidity_step = target_humidity_step
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the device on."""
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["async_upnp_client"],
|
||||
"requirements": ["async-upnp-client==0.46.2", "getmac==0.9.5"],
|
||||
"requirements": ["async-upnp-client==0.46.1", "getmac==0.9.5"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/dlna_dms",
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["async-upnp-client==0.46.2"],
|
||||
"requirements": ["async-upnp-client==0.46.1"],
|
||||
"ssdp": [
|
||||
{
|
||||
"deviceType": "urn:schemas-upnp-org:device:MediaServer:1",
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/dnsip",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["aiodns==4.0.0"]
|
||||
"requirements": ["aiodns==3.6.1"]
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.notify import ATTR_TARGET, BaseNotificationService
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -30,7 +29,7 @@ class DovadoSMSNotificationService(BaseNotificationService):
|
||||
"""Initialize the service."""
|
||||
self._client = client
|
||||
|
||||
def send_message(self, message: str, **kwargs: Any) -> None:
|
||||
def send_message(self, message, **kwargs):
|
||||
"""Send SMS to the specified target phone number."""
|
||||
if not (target := kwargs.get(ATTR_TARGET)):
|
||||
_LOGGER.error("One target is required")
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Duck DNS integration."""
|
||||
"""Integrate with DuckDNS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -4,6 +4,5 @@
|
||||
"codeowners": ["@tr4nt0r"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/duckdns",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling"
|
||||
}
|
||||
|
||||
@@ -2,12 +2,11 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from aiohttp import ClientError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
@@ -63,25 +62,9 @@ async def update_domain_service(call: ServiceCall) -> None:
|
||||
|
||||
session = async_get_clientsession(call.hass)
|
||||
|
||||
try:
|
||||
if not await update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
txt=call.data.get(ATTR_TXT),
|
||||
):
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
translation_placeholders={
|
||||
CONF_DOMAIN: entry.data[CONF_DOMAIN],
|
||||
},
|
||||
)
|
||||
except ClientError as e:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
translation_placeholders={
|
||||
CONF_DOMAIN: entry.data[CONF_DOMAIN],
|
||||
},
|
||||
) from e
|
||||
await update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
txt=call.data.get(ATTR_TXT),
|
||||
)
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/easyenergy",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["easyenergy==2.2.0"],
|
||||
"requirements": ["easyenergy==2.1.2"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -37,7 +37,7 @@ class EheimDigitalEntity[_DeviceT: EheimDigitalDevice](
|
||||
name=device.name,
|
||||
connections={(CONNECTION_NETWORK_MAC, device.mac_address)},
|
||||
manufacturer="EHEIM",
|
||||
model=device.model_name,
|
||||
model=device.device_type.model_name,
|
||||
identifiers={(DOMAIN, device.mac_address)},
|
||||
suggested_area=device.aquarium_name,
|
||||
sw_version=device.sw_version,
|
||||
@@ -59,9 +59,9 @@ class EheimDigitalEntity[_DeviceT: EheimDigitalDevice](
|
||||
def exception_handler[_EntityT: EheimDigitalEntity[EheimDigitalDevice], **_P](
|
||||
func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]],
|
||||
) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
|
||||
"""Decorate eheimdigital calls to handle exceptions.
|
||||
"""Decorate AirGradient calls to handle exceptions.
|
||||
|
||||
A decorator that wraps the passed in function, catches eheimdigital errors.
|
||||
A decorator that wraps the passed in function, catches AirGradient errors.
|
||||
"""
|
||||
|
||||
async def handler(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
|
||||
|
||||
@@ -6,7 +6,6 @@ from typing import Any, override
|
||||
|
||||
from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
from eheimdigital.heater import EheimDigitalHeater
|
||||
from eheimdigital.types import HeaterUnit
|
||||
|
||||
@@ -22,7 +21,6 @@ from homeassistant.const import (
|
||||
PRECISION_WHOLE,
|
||||
EntityCategory,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
@@ -44,34 +42,6 @@ class EheimDigitalNumberDescription[_DeviceT: EheimDigitalDevice](
|
||||
uom_fn: Callable[[_DeviceT], str] | None = None
|
||||
|
||||
|
||||
FILTER_DESCRIPTIONS: tuple[EheimDigitalNumberDescription[EheimDigitalFilter], ...] = (
|
||||
EheimDigitalNumberDescription[EheimDigitalFilter](
|
||||
key="high_pulse_time",
|
||||
translation_key="high_pulse_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_min_value=5,
|
||||
native_max_value=200000,
|
||||
value_fn=lambda device: device.high_pulse_time,
|
||||
set_value_fn=lambda device, value: device.set_high_pulse_time(int(value)),
|
||||
),
|
||||
EheimDigitalNumberDescription[EheimDigitalFilter](
|
||||
key="low_pulse_time",
|
||||
translation_key="low_pulse_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_min_value=5,
|
||||
native_max_value=200000,
|
||||
value_fn=lambda device: device.low_pulse_time,
|
||||
set_value_fn=lambda device, value: device.set_low_pulse_time(int(value)),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
CLASSICVARIO_DESCRIPTIONS: tuple[
|
||||
EheimDigitalNumberDescription[EheimDigitalClassicVario], ...
|
||||
] = (
|
||||
@@ -175,13 +145,6 @@ async def async_setup_entry(
|
||||
)
|
||||
for description in CLASSICVARIO_DESCRIPTIONS
|
||||
)
|
||||
if isinstance(device, EheimDigitalFilter):
|
||||
entities.extend(
|
||||
EheimDigitalNumber[EheimDigitalFilter](
|
||||
coordinator, device, description
|
||||
)
|
||||
for description in FILTER_DESCRIPTIONS
|
||||
)
|
||||
if isinstance(device, EheimDigitalHeater):
|
||||
entities.extend(
|
||||
EheimDigitalNumber[EheimDigitalHeater](
|
||||
|
||||
@@ -2,19 +2,13 @@
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Literal, override
|
||||
from typing import Any, override
|
||||
|
||||
from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
from eheimdigital.types import (
|
||||
FilterMode,
|
||||
FilterModeProf,
|
||||
UnitOfMeasurement as EheimDigitalUnitOfMeasurement,
|
||||
)
|
||||
from eheimdigital.types import FilterMode
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.const import EntityCategory, UnitOfFrequency, UnitOfVolumeFlowRate
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -30,109 +24,8 @@ class EheimDigitalSelectDescription[_DeviceT: EheimDigitalDevice](
|
||||
):
|
||||
"""Class describing EHEIM Digital select entities."""
|
||||
|
||||
options_fn: Callable[[_DeviceT], list[str]] | None = None
|
||||
use_api_unit: Literal[True] | None = None
|
||||
value_fn: Callable[[_DeviceT], str | None]
|
||||
set_value_fn: Callable[[_DeviceT, str], Awaitable[None] | None]
|
||||
|
||||
|
||||
FILTER_DESCRIPTIONS: tuple[EheimDigitalSelectDescription[EheimDigitalFilter], ...] = (
|
||||
EheimDigitalSelectDescription[EheimDigitalFilter](
|
||||
key="filter_mode",
|
||||
translation_key="filter_mode",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
options=[item.lower() for item in FilterModeProf._member_names_],
|
||||
value_fn=lambda device: device.filter_mode.name.lower(),
|
||||
set_value_fn=lambda device, value: device.set_filter_mode(
|
||||
FilterModeProf[value.upper()]
|
||||
),
|
||||
),
|
||||
EheimDigitalSelectDescription[EheimDigitalFilter](
|
||||
key="manual_speed",
|
||||
translation_key="manual_speed",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
unit_of_measurement=UnitOfFrequency.HERTZ,
|
||||
options_fn=lambda device: [str(i) for i in device.filter_manual_values],
|
||||
value_fn=lambda device: str(device.manual_speed),
|
||||
set_value_fn=lambda device, value: device.set_manual_speed(float(value)),
|
||||
),
|
||||
EheimDigitalSelectDescription[EheimDigitalFilter](
|
||||
key="const_flow_speed",
|
||||
translation_key="const_flow_speed",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
use_api_unit=True,
|
||||
unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_HOUR,
|
||||
options_fn=lambda device: [str(i) for i in device.filter_const_flow_values],
|
||||
value_fn=lambda device: str(device.filter_const_flow_values[device.const_flow]),
|
||||
set_value_fn=(
|
||||
lambda device, value: device.set_const_flow(
|
||||
device.filter_const_flow_values.index(int(value))
|
||||
)
|
||||
),
|
||||
),
|
||||
EheimDigitalSelectDescription[EheimDigitalFilter](
|
||||
key="day_speed",
|
||||
translation_key="day_speed",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
use_api_unit=True,
|
||||
unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_HOUR,
|
||||
options_fn=lambda device: [str(i) for i in device.filter_const_flow_values],
|
||||
value_fn=lambda device: str(device.filter_const_flow_values[device.day_speed]),
|
||||
set_value_fn=(
|
||||
lambda device, value: device.set_day_speed(
|
||||
device.filter_const_flow_values.index(int(value))
|
||||
)
|
||||
),
|
||||
),
|
||||
EheimDigitalSelectDescription[EheimDigitalFilter](
|
||||
key="night_speed",
|
||||
translation_key="night_speed",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
use_api_unit=True,
|
||||
unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_HOUR,
|
||||
options_fn=lambda device: [str(i) for i in device.filter_const_flow_values],
|
||||
value_fn=lambda device: str(
|
||||
device.filter_const_flow_values[device.night_speed]
|
||||
),
|
||||
set_value_fn=(
|
||||
lambda device, value: device.set_night_speed(
|
||||
device.filter_const_flow_values.index(int(value))
|
||||
)
|
||||
),
|
||||
),
|
||||
EheimDigitalSelectDescription[EheimDigitalFilter](
|
||||
key="high_pulse_speed",
|
||||
translation_key="high_pulse_speed",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
use_api_unit=True,
|
||||
unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_HOUR,
|
||||
options_fn=lambda device: [str(i) for i in device.filter_const_flow_values],
|
||||
value_fn=lambda device: str(
|
||||
device.filter_const_flow_values[device.high_pulse_speed]
|
||||
),
|
||||
set_value_fn=(
|
||||
lambda device, value: device.set_high_pulse_speed(
|
||||
device.filter_const_flow_values.index(int(value))
|
||||
)
|
||||
),
|
||||
),
|
||||
EheimDigitalSelectDescription[EheimDigitalFilter](
|
||||
key="low_pulse_speed",
|
||||
translation_key="low_pulse_speed",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
use_api_unit=True,
|
||||
unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_HOUR,
|
||||
options_fn=lambda device: [str(i) for i in device.filter_const_flow_values],
|
||||
value_fn=lambda device: str(
|
||||
device.filter_const_flow_values[device.low_pulse_speed]
|
||||
),
|
||||
set_value_fn=(
|
||||
lambda device, value: device.set_low_pulse_speed(
|
||||
device.filter_const_flow_values.index(int(value))
|
||||
)
|
||||
),
|
||||
),
|
||||
)
|
||||
set_value_fn: Callable[[_DeviceT, str], Awaitable[None]]
|
||||
|
||||
|
||||
CLASSICVARIO_DESCRIPTIONS: tuple[
|
||||
@@ -141,7 +34,11 @@ CLASSICVARIO_DESCRIPTIONS: tuple[
|
||||
EheimDigitalSelectDescription[EheimDigitalClassicVario](
|
||||
key="filter_mode",
|
||||
translation_key="filter_mode",
|
||||
value_fn=lambda device: device.filter_mode.name.lower(),
|
||||
value_fn=(
|
||||
lambda device: device.filter_mode.name.lower()
|
||||
if device.filter_mode is not None
|
||||
else None
|
||||
),
|
||||
set_value_fn=(
|
||||
lambda device, value: device.set_filter_mode(FilterMode[value.upper()])
|
||||
),
|
||||
@@ -171,11 +68,6 @@ async def async_setup_entry(
|
||||
)
|
||||
for description in CLASSICVARIO_DESCRIPTIONS
|
||||
)
|
||||
if isinstance(device, EheimDigitalFilter):
|
||||
entities.extend(
|
||||
EheimDigitalFilterSelect(coordinator, device, description)
|
||||
for description in FILTER_DESCRIPTIONS
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -190,8 +82,6 @@ class EheimDigitalSelect[_DeviceT: EheimDigitalDevice](
|
||||
|
||||
entity_description: EheimDigitalSelectDescription[_DeviceT]
|
||||
|
||||
_attr_options: list[str]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: EheimDigitalUpdateCoordinator,
|
||||
@@ -201,49 +91,13 @@ class EheimDigitalSelect[_DeviceT: EheimDigitalDevice](
|
||||
"""Initialize an EHEIM Digital select entity."""
|
||||
super().__init__(coordinator, device)
|
||||
self.entity_description = description
|
||||
if description.options_fn is not None:
|
||||
self._attr_options = description.options_fn(device)
|
||||
elif description.options is not None:
|
||||
self._attr_options = description.options
|
||||
self._attr_unique_id = f"{self._device_address}_{description.key}"
|
||||
|
||||
@override
|
||||
@exception_handler
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
if await_return := self.entity_description.set_value_fn(self._device, option):
|
||||
return await await_return
|
||||
return None
|
||||
return await self.entity_description.set_value_fn(self._device, option)
|
||||
|
||||
@override
|
||||
def _async_update_attrs(self) -> None:
|
||||
self._attr_current_option = self.entity_description.value_fn(self._device)
|
||||
|
||||
|
||||
class EheimDigitalFilterSelect(EheimDigitalSelect[EheimDigitalFilter]):
|
||||
"""Represent an EHEIM Digital Filter select entity."""
|
||||
|
||||
entity_description: EheimDigitalSelectDescription[EheimDigitalFilter]
|
||||
_attr_native_unit_of_measurement: str | None
|
||||
|
||||
@override
|
||||
def _async_update_attrs(self) -> None:
|
||||
if (
|
||||
self.entity_description.options is None
|
||||
and self.entity_description.options_fn is not None
|
||||
):
|
||||
self._attr_options = self.entity_description.options_fn(self._device)
|
||||
if self.entity_description.use_api_unit:
|
||||
if (
|
||||
self.entity_description.unit_of_measurement
|
||||
== UnitOfVolumeFlowRate.LITERS_PER_HOUR
|
||||
and self._device.usrdta["unit"]
|
||||
== int(EheimDigitalUnitOfMeasurement.US_CUSTOMARY)
|
||||
):
|
||||
self._attr_native_unit_of_measurement = (
|
||||
UnitOfVolumeFlowRate.GALLONS_PER_HOUR
|
||||
)
|
||||
else:
|
||||
self._attr_native_unit_of_measurement = (
|
||||
self.entity_description.unit_of_measurement
|
||||
)
|
||||
super()._async_update_attrs()
|
||||
|
||||
@@ -6,7 +6,6 @@ from typing import Any, override
|
||||
|
||||
from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
from eheimdigital.types import FilterErrorCode
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -14,7 +13,7 @@ from homeassistant.components.sensor import (
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfFrequency, UnitOfTime
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTime
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -34,27 +33,6 @@ class EheimDigitalSensorDescription[_DeviceT: EheimDigitalDevice](
|
||||
value_fn: Callable[[_DeviceT], float | str | None]
|
||||
|
||||
|
||||
FILTER_DESCRIPTIONS: tuple[EheimDigitalSensorDescription[EheimDigitalFilter], ...] = (
|
||||
EheimDigitalSensorDescription[EheimDigitalFilter](
|
||||
key="current_speed",
|
||||
translation_key="current_speed",
|
||||
value_fn=lambda device: device.current_speed,
|
||||
device_class=SensorDeviceClass.FREQUENCY,
|
||||
suggested_display_precision=1,
|
||||
native_unit_of_measurement=UnitOfFrequency.HERTZ,
|
||||
),
|
||||
EheimDigitalSensorDescription[EheimDigitalFilter](
|
||||
key="service_hours",
|
||||
translation_key="service_hours",
|
||||
value_fn=lambda device: device.service_hours,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.HOURS,
|
||||
suggested_unit_of_measurement=UnitOfTime.DAYS,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
CLASSICVARIO_DESCRIPTIONS: tuple[
|
||||
EheimDigitalSensorDescription[EheimDigitalClassicVario], ...
|
||||
] = (
|
||||
@@ -76,7 +54,11 @@ CLASSICVARIO_DESCRIPTIONS: tuple[
|
||||
EheimDigitalSensorDescription[EheimDigitalClassicVario](
|
||||
key="error_code",
|
||||
translation_key="error_code",
|
||||
value_fn=lambda device: device.error_code.name.lower(),
|
||||
value_fn=(
|
||||
lambda device: device.error_code.name.lower()
|
||||
if device.error_code is not None
|
||||
else None
|
||||
),
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=[name.lower() for name in FilterErrorCode._member_names_],
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
@@ -98,13 +80,6 @@ async def async_setup_entry(
|
||||
"""Set up the light entities for one or multiple devices."""
|
||||
entities: list[EheimDigitalSensor[Any]] = []
|
||||
for device in device_address.values():
|
||||
if isinstance(device, EheimDigitalFilter):
|
||||
entities += [
|
||||
EheimDigitalSensor[EheimDigitalFilter](
|
||||
coordinator, device, description
|
||||
)
|
||||
for description in FILTER_DESCRIPTIONS
|
||||
]
|
||||
if isinstance(device, EheimDigitalClassicVario):
|
||||
entities += [
|
||||
EheimDigitalSensor[EheimDigitalClassicVario](
|
||||
|
||||
@@ -61,12 +61,6 @@
|
||||
"day_speed": {
|
||||
"name": "Day speed"
|
||||
},
|
||||
"high_pulse_time": {
|
||||
"name": "High pulse duration"
|
||||
},
|
||||
"low_pulse_time": {
|
||||
"name": "Low pulse duration"
|
||||
},
|
||||
"manual_speed": {
|
||||
"name": "Manual speed"
|
||||
},
|
||||
@@ -84,32 +78,13 @@
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"const_flow_speed": {
|
||||
"name": "Constant flow speed"
|
||||
},
|
||||
"day_speed": {
|
||||
"name": "Day speed"
|
||||
},
|
||||
"filter_mode": {
|
||||
"name": "Filter mode",
|
||||
"state": {
|
||||
"bio": "Bio",
|
||||
"constant_flow": "Constant flow",
|
||||
"manual": "Manual",
|
||||
"pulse": "Pulse"
|
||||
}
|
||||
},
|
||||
"high_pulse_speed": {
|
||||
"name": "High pulse speed"
|
||||
},
|
||||
"low_pulse_speed": {
|
||||
"name": "Low pulse speed"
|
||||
},
|
||||
"manual_speed": {
|
||||
"name": "Manual speed"
|
||||
},
|
||||
"night_speed": {
|
||||
"name": "Night speed"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
@@ -124,17 +99,8 @@
|
||||
"rotor_stuck": "Rotor stuck"
|
||||
}
|
||||
},
|
||||
"operating_time": {
|
||||
"name": "Operating time"
|
||||
},
|
||||
"service_hours": {
|
||||
"name": "Remaining hours until service"
|
||||
},
|
||||
"turn_feeding_time": {
|
||||
"name": "Remaining off time after feeding"
|
||||
},
|
||||
"turn_off_time": {
|
||||
"name": "Remaining off time"
|
||||
}
|
||||
},
|
||||
"time": {
|
||||
|
||||
@@ -4,7 +4,6 @@ from typing import Any, override
|
||||
|
||||
from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -31,8 +30,8 @@ async def async_setup_entry(
|
||||
"""Set up the switch entities for one or multiple devices."""
|
||||
entities: list[SwitchEntity] = []
|
||||
for device in device_address.values():
|
||||
if isinstance(device, (EheimDigitalClassicVario, EheimDigitalFilter)):
|
||||
entities.append(EheimDigitalFilterSwitch(coordinator, device)) # noqa: PERF401
|
||||
if isinstance(device, EheimDigitalClassicVario):
|
||||
entities.append(EheimDigitalClassicVarioSwitch(coordinator, device)) # noqa: PERF401
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -40,10 +39,10 @@ async def async_setup_entry(
|
||||
async_setup_device_entities(coordinator.hub.devices)
|
||||
|
||||
|
||||
class EheimDigitalFilterSwitch(
|
||||
EheimDigitalEntity[EheimDigitalClassicVario | EheimDigitalFilter], SwitchEntity
|
||||
class EheimDigitalClassicVarioSwitch(
|
||||
EheimDigitalEntity[EheimDigitalClassicVario], SwitchEntity
|
||||
):
|
||||
"""Represent an EHEIM Digital classicVARIO or filter switch entity."""
|
||||
"""Represent an EHEIM Digital classicVARIO switch entity."""
|
||||
|
||||
_attr_translation_key = "filter_active"
|
||||
_attr_name = None
|
||||
@@ -51,9 +50,9 @@ class EheimDigitalFilterSwitch(
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: EheimDigitalUpdateCoordinator,
|
||||
device: EheimDigitalClassicVario | EheimDigitalFilter,
|
||||
device: EheimDigitalClassicVario,
|
||||
) -> None:
|
||||
"""Initialize an EHEIM Digital classicVARIO or filter switch entity."""
|
||||
"""Initialize an EHEIM Digital classicVARIO switch entity."""
|
||||
super().__init__(coordinator, device)
|
||||
self._attr_unique_id = device.mac_address
|
||||
self._async_update_attrs()
|
||||
|
||||
@@ -7,7 +7,6 @@ from typing import Any, final, override
|
||||
|
||||
from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
from eheimdigital.heater import EheimDigitalHeater
|
||||
|
||||
from homeassistant.components.time import TimeEntity, TimeEntityDescription
|
||||
@@ -29,23 +28,6 @@ class EheimDigitalTimeDescription[_DeviceT: EheimDigitalDevice](TimeEntityDescri
|
||||
set_value_fn: Callable[[_DeviceT, time], Awaitable[None]]
|
||||
|
||||
|
||||
FILTER_DESCRIPTIONS: tuple[EheimDigitalTimeDescription[EheimDigitalFilter], ...] = (
|
||||
EheimDigitalTimeDescription[EheimDigitalFilter](
|
||||
key="day_start_time",
|
||||
translation_key="day_start_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
value_fn=lambda device: device.day_start_time,
|
||||
set_value_fn=lambda device, value: device.set_day_start_time(value),
|
||||
),
|
||||
EheimDigitalTimeDescription[EheimDigitalFilter](
|
||||
key="night_start_time",
|
||||
translation_key="night_start_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
value_fn=lambda device: device.night_start_time,
|
||||
set_value_fn=lambda device, value: device.set_night_start_time(value),
|
||||
),
|
||||
)
|
||||
|
||||
CLASSICVARIO_DESCRIPTIONS: tuple[
|
||||
EheimDigitalTimeDescription[EheimDigitalClassicVario], ...
|
||||
] = (
|
||||
@@ -97,13 +79,6 @@ async def async_setup_entry(
"""Set up the time entities for one or multiple devices."""
entities: list[EheimDigitalTime[Any]] = []
for device in device_address.values():
if isinstance(device, EheimDigitalFilter):
entities.extend(
EheimDigitalTime[EheimDigitalFilter](
coordinator, device, description
)
for description in FILTER_DESCRIPTIONS
)
if isinstance(device, EheimDigitalClassicVario):
entities.extend(
EheimDigitalTime[EheimDigitalClassicVario](

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"quality_scale": "platinum",
"requirements": ["pyenphase==2.4.3"],
"requirements": ["pyenphase==2.4.2"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

@@ -206,7 +206,7 @@ class EnvoyProductionSensorEntityDescription(SensorEntityDescription):
"""Describes an Envoy production sensor entity."""

value_fn: Callable[[EnvoySystemProduction], int]
on_phase: str | None = None
on_phase: str | None


PRODUCTION_SENSORS = (
@@ -219,6 +219,7 @@ PRODUCTION_SENSORS = (
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
suggested_display_precision=3,
value_fn=attrgetter("watts_now"),
on_phase=None,
),
EnvoyProductionSensorEntityDescription(
key="daily_production",
@@ -229,6 +230,7 @@ PRODUCTION_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
suggested_display_precision=2,
value_fn=attrgetter("watt_hours_today"),
on_phase=None,
),
EnvoyProductionSensorEntityDescription(
key="seven_days_production",
@@ -238,6 +240,7 @@ PRODUCTION_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
suggested_display_precision=1,
value_fn=attrgetter("watt_hours_last_7_days"),
on_phase=None,
),
EnvoyProductionSensorEntityDescription(
key="lifetime_production",
@@ -248,6 +251,7 @@ PRODUCTION_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("watt_hours_lifetime"),
on_phase=None,
),
)

@@ -273,7 +277,7 @@ class EnvoyConsumptionSensorEntityDescription(SensorEntityDescription):
"""Describes an Envoy consumption sensor entity."""

value_fn: Callable[[EnvoySystemConsumption], int]
on_phase: str | None = None
on_phase: str | None


CONSUMPTION_SENSORS = (
@@ -286,6 +290,7 @@ CONSUMPTION_SENSORS = (
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
suggested_display_precision=3,
value_fn=attrgetter("watts_now"),
on_phase=None,
),
EnvoyConsumptionSensorEntityDescription(
key="daily_consumption",
@@ -296,6 +301,7 @@ CONSUMPTION_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
suggested_display_precision=2,
value_fn=attrgetter("watt_hours_today"),
on_phase=None,
),
EnvoyConsumptionSensorEntityDescription(
key="seven_days_consumption",
@@ -305,6 +311,7 @@ CONSUMPTION_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
suggested_display_precision=1,
value_fn=attrgetter("watt_hours_last_7_days"),
on_phase=None,
),
EnvoyConsumptionSensorEntityDescription(
key="lifetime_consumption",
@@ -315,6 +322,7 @@ CONSUMPTION_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("watt_hours_lifetime"),
on_phase=None,
),
)

@@ -346,6 +354,7 @@ NET_CONSUMPTION_SENSORS = (
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
suggested_display_precision=3,
value_fn=attrgetter("watts_now"),
on_phase=None,
),
EnvoyConsumptionSensorEntityDescription(
key="lifetime_balanced_net_consumption",
@@ -357,6 +366,7 @@ NET_CONSUMPTION_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("watt_hours_lifetime"),
on_phase=None,
),
)

@@ -385,7 +395,7 @@ class EnvoyCTSensorEntityDescription(SensorEntityDescription):
[EnvoyMeterData],
int | float | str | CtType | CtMeterStatus | CtStatusFlags | CtState | None,
]
on_phase: str | None = None
on_phase: str | None
cttype: str | None = None


@@ -401,6 +411,7 @@ CT_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("energy_delivered"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
@@ -419,6 +430,7 @@ CT_SENSORS = (
suggested_unit_of_measurement=UnitOfEnergy.MEGA_WATT_HOUR,
suggested_display_precision=3,
value_fn=attrgetter("energy_received"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
@@ -437,6 +449,7 @@ CT_SENSORS = (
suggested_unit_of_measurement=UnitOfPower.KILO_WATT,
suggested_display_precision=3,
value_fn=attrgetter("active_power"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
@@ -455,6 +468,7 @@ CT_SENSORS = (
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("frequency"),
on_phase=None,
cttype=cttype,
)
for cttype, key, translation_key in (
@@ -474,6 +488,7 @@ CT_SENSORS = (
suggested_display_precision=1,
entity_registry_enabled_default=False,
value_fn=attrgetter("voltage"),
on_phase=None,
cttype=cttype,
)
for cttype, key, translation_key in (
@@ -493,6 +508,7 @@ CT_SENSORS = (
suggested_display_precision=3,
entity_registry_enabled_default=False,
value_fn=attrgetter("current"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
@@ -510,6 +526,7 @@ CT_SENSORS = (
suggested_display_precision=2,
entity_registry_enabled_default=False,
value_fn=attrgetter("power_factor"),
on_phase=None,
cttype=cttype,
)
for cttype, key in (
@@ -527,6 +544,7 @@ CT_SENSORS = (
options=list(CtMeterStatus),
entity_registry_enabled_default=False,
value_fn=attrgetter("metering_status"),
on_phase=None,
cttype=cttype,
)
for cttype, key, translation_key in (
@@ -547,6 +565,7 @@ CT_SENSORS = (
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
on_phase=None,
cttype=cttype,
)
for cttype, key, translation_key in (

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==43.10.1",
"aioesphomeapi==43.9.1",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],

@@ -5,7 +5,6 @@ from __future__ import annotations
from http import HTTPStatus
import json
import logging
from typing import Any

import requests
import voluptuous as vol
@@ -47,7 +46,7 @@ class FacebookNotificationService(BaseNotificationService):
"""Initialize the service."""
self.page_access_token = access_token

def send_message(self, message: str = "", **kwargs: Any) -> None:
def send_message(self, message="", **kwargs):
"""Send some message."""
payload = {"access_token": self.page_access_token}
targets = kwargs.get(ATTR_TARGET)

@@ -19,9 +19,6 @@ from .coordinator import FeedReaderCoordinator

LOGGER = logging.getLogger(__name__)

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

ATTR_CONTENT = "content"
ATTR_DESCRIPTION = "description"
ATTR_LINK = "link"

@@ -1,94 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: No custom actions are defined.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: missing test for uniqueness of feed URL.
config-flow:
status: todo
comment: missing data descriptions
dependency-transparency: done
docs-actions:
status: exempt
comment: No custom actions are defined.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done

# Silver
action-exceptions:
status: exempt
comment: No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: exempt
comment: No authentication support.
test-coverage:
status: done
comment: Can use freezer for skipping time instead

# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: No discovery support.
discovery:
status: exempt
comment: No discovery support.
docs-data-update: done
docs-examples: done
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: done
dynamic-devices:
status: exempt
comment: Each config entry, represents one service.
entity-category: done
entity-device-class:
status: exempt
comment: Matches no available event entity class.
entity-disabled-by-default:
status: exempt
comment: Only one entity per config entry.
entity-translations: todo
exception-translations: todo
icon-translations: done
reconfiguration-flow: done
repair-issues:
status: done
comment: Only one repair-issue for yaml-import defined.
stale-devices:
status: exempt
comment: Each config entry, represents one service.

# Platinum
async-dependency:
status: todo
comment: feedparser lib is not async.
inject-websession:
status: todo
comment: feedparser lib doesn't take a session as argument.
strict-typing:
status: todo
comment: feedparser lib is not fully typed.
@@ -2,7 +2,6 @@

from __future__ import annotations

import asyncio
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
@@ -98,30 +97,17 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]
end_date = now

try:
(
accounts,
categories,
primary_currency,
budgets,
bills,
) = await asyncio.gather(
self.firefly.get_accounts(),
self.firefly.get_categories(),
self.firefly.get_currency_primary(),
self.firefly.get_budgets(start=start_date, end=end_date),
self.firefly.get_bills(),
)

category_details = await asyncio.gather(
*(
self.firefly.get_category(
category_id=int(category.id),
start=start_date,
end=end_date,
)
for category in categories
accounts = await self.firefly.get_accounts()
categories = await self.firefly.get_categories()
category_details = [
await self.firefly.get_category(
category_id=int(category.id), start=start_date, end=end_date
)
)
for category in categories
]
primary_currency = await self.firefly.get_currency_primary()
budgets = await self.firefly.get_budgets(start=start_date, end=end_date)
bills = await self.firefly.get_bills()
except FireflyAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.11"]
"requirements": ["pyfirefly==0.1.8"]
}

@@ -5,7 +5,6 @@ from __future__ import annotations
import asyncio
from http import HTTPStatus
import logging
from typing import Any

import voluptuous as vol

@@ -48,7 +47,7 @@ class FlockNotificationService(BaseNotificationService):
self._url = url
self._session = session

async def async_send_message(self, message: str, **kwargs: Any) -> None:
async def async_send_message(self, message, **kwargs):
"""Send the message to the user."""
payload = {"text": message}

@@ -4,7 +4,6 @@ from __future__ import annotations

from http import HTTPStatus
import logging
from typing import Any

from freesms import FreeClient
import voluptuous as vol
@@ -41,7 +40,7 @@ class FreeSMSNotificationService(BaseNotificationService):
"""Initialize the service."""
self.free_client = FreeClient(username, access_token)

def send_message(self, message: str = "", **kwargs: Any) -> None:
def send_message(self, message="", **kwargs):
"""Send a message to the Free Mobile user cell."""
resp = self.free_client.send_sms(message)

@@ -31,7 +31,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
)
STEP_SMS_CODE_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_SMS_CODE): str,
vol.Required(CONF_SMS_CODE): int,
}
)

@@ -75,7 +75,7 @@ class FressnapfTrackerConfigFlow(ConfigFlow, domain=DOMAIN):
return errors, False

async def _async_verify_sms_code(
self, sms_code: str
self, sms_code: int
) -> tuple[dict[str, str], str | None]:
"""Verify SMS code and return errors and access_token."""
errors: dict[str, str] = {}

@@ -2,8 +2,6 @@

from typing import TYPE_CHECKING, Any

from fressnapftracker import FressnapfTrackerError

from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ColorMode,
@@ -18,7 +16,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import FressnapfTrackerConfigEntry
from .const import DOMAIN
from .entity import FressnapfTrackerEntity
from .services import handle_fressnapf_tracker_exception

PARALLEL_UPDATES = 1

@@ -64,18 +61,12 @@ class FressnapfTrackerLight(FressnapfTrackerEntity, LightEntity):
self.raise_if_not_activatable()
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
brightness = int((brightness / 255) * 100)
try:
await self.coordinator.client.set_led_brightness(brightness)
except FressnapfTrackerError as e:
handle_fressnapf_tracker_exception(e)
await self.coordinator.client.set_led_brightness(brightness)
await self.coordinator.async_request_refresh()

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the device."""
try:
await self.coordinator.client.set_led_brightness(0)
except FressnapfTrackerError as e:
handle_fressnapf_tracker_exception(e)
await self.coordinator.client.set_led_brightness(0)
await self.coordinator.async_request_refresh()

def raise_if_not_activatable(self) -> None:

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["fressnapftracker==0.2.1"]
"requirements": ["fressnapftracker==0.2.0"]
}

@@ -26,7 +26,7 @@ rules:
unique-config-entry: done

# Silver
action-exceptions: done
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done

@@ -1,21 +0,0 @@
"""Services and service helpers for fressnapf_tracker."""

from fressnapftracker import FressnapfTrackerError, FressnapfTrackerInvalidTokenError

from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError

from .const import DOMAIN


def handle_fressnapf_tracker_exception(exception: FressnapfTrackerError):
"""Handle the different FressnapfTracker errors."""
if isinstance(exception, FressnapfTrackerInvalidTokenError):
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
) from exception
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"error_message": str(exception)},
) from exception
@@ -77,9 +77,6 @@
}
},
"exceptions": {
"api_error": {
"message": "An error occurred while communicating with the Fressnapf Tracker API: {error_message}"
},
"charging": {
"message": "The flashlight cannot be activated while charging."
},

@@ -2,8 +2,6 @@

from typing import TYPE_CHECKING, Any

from fressnapftracker import FressnapfTrackerError

from homeassistant.components.switch import (
SwitchDeviceClass,
SwitchEntity,
@@ -15,7 +13,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import FressnapfTrackerConfigEntry
from .entity import FressnapfTrackerEntity
from .services import handle_fressnapf_tracker_exception

PARALLEL_UPDATES = 1

@@ -46,18 +43,12 @@ class FressnapfTrackerSwitch(FressnapfTrackerEntity, SwitchEntity):

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the device."""
try:
await self.coordinator.client.set_energy_saving(True)
except FressnapfTrackerError as e:
handle_fressnapf_tracker_exception(e)
await self.coordinator.client.set_energy_saving(True)
await self.coordinator.async_request_refresh()

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the device."""
try:
await self.coordinator.client.set_energy_saving(False)
except FressnapfTrackerError as e:
handle_fressnapf_tracker_exception(e)
await self.coordinator.client.set_energy_saving(False)
await self.coordinator.async_request_refresh()

@property

@@ -164,12 +164,13 @@ def _async_wol_buttons_list(
class FritzBoxWOLButton(FritzDeviceBase, ButtonEntity):
"""Defines a FRITZ!Box Tools Wake On LAN button."""

_attr_icon = "mdi:lan-pending"
_attr_entity_registry_enabled_default = False
_attr_translation_key = "wake_on_lan"

def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
"""Initialize Fritz!Box WOL button."""
super().__init__(avm_wrapper, device)
self._name = f"{self.hostname} Wake on LAN"
self._attr_unique_id = f"{self._mac}_wake_on_lan"
self._is_available = True

@@ -10,7 +10,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DEFAULT_DEVICE_NAME
from .coordinator import FRITZ_DATA_KEY, AvmWrapper, FritzConfigEntry, FritzData
from .entity import FritzDeviceBase
from .helpers import device_filter_out_from_trackers
@@ -72,7 +71,6 @@ class FritzBoxTracker(FritzDeviceBase, ScannerEntity):
def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
"""Initialize a FRITZ!Box device."""
super().__init__(avm_wrapper, device)
self._attr_name: str = device.hostname or DEFAULT_DEVICE_NAME
self._last_activity: datetime.datetime | None = device.last_activity

@property

@@ -13,7 +13,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .const import DEFAULT_DEVICE_NAME, DOMAIN
from .coordinator import AvmWrapper
from .models import FritzDevice

@@ -21,17 +21,21 @@ from .models import FritzDevice
class FritzDeviceBase(CoordinatorEntity[AvmWrapper]):
"""Entity base class for a device connected to a FRITZ!Box device."""

_attr_has_entity_name = True

def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
"""Initialize a FRITZ!Box device."""
super().__init__(avm_wrapper)
self._avm_wrapper = avm_wrapper
self._mac: str = device.mac_address
self._name: str = device.hostname or DEFAULT_DEVICE_NAME
self._attr_device_info = DeviceInfo(
connections={(dr.CONNECTION_NETWORK_MAC, device.mac_address)}
)

@property
def name(self) -> str:
"""Return device name."""
return self._name

@property
def ip_address(self) -> str | None:
"""Return the primary ip address of the device."""

@@ -3,9 +3,6 @@
"button": {
"cleanup": {
"default": "mdi:broom"
},
"wake_on_lan": {
"default": "mdi:lan-pending"
}
},
"sensor": {
@@ -51,11 +48,6 @@
"max_kb_s_sent": {
"default": "mdi:upload"
}
},
"switch": {
"internet_access": {
"default": "mdi:router-wireless-settings"
}
}
},
"services": {

@@ -8,7 +8,6 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["fritzconnection"],
"quality_scale": "bronze",
"requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==1.0.2"],
"ssdp": [
{

@@ -13,7 +13,9 @@ rules:
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
has-entity-name:
status: todo
comment: partially done
runtime-data: done
test-before-configure: done
test-before-setup: done

@@ -108,9 +108,6 @@
},
"reconnect": {
"name": "Reconnect"
},
"wake_on_lan": {
"name": "Wake on LAN"
}
},
"sensor": {
@@ -165,11 +162,6 @@
"max_kb_s_sent": {
"name": "Max connection upload throughput"
}
},
"switch": {
"internet_access": {
"name": "Internet access"
}
}
},
"exceptions": {

@@ -499,12 +499,13 @@ class FritzBoxDeflectionSwitch(FritzBoxBaseCoordinatorSwitch):
class FritzBoxProfileSwitch(FritzDeviceBase, SwitchEntity):
"""Defines a FRITZ!Box Tools DeviceProfile switch."""

_attr_translation_key = "internet_access"
_attr_icon = "mdi:router-wireless-settings"

def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
"""Init Fritz profile."""
super().__init__(avm_wrapper, device)
self._attr_is_on: bool = False
self._name = f"{device.hostname} Internet Access"
self._attr_unique_id = f"{self._mac}_internet_access"
self._attr_entity_category = EntityCategory.CONFIG

@@ -66,7 +66,6 @@ from .const import (
CONF_COLD_TOLERANCE,
CONF_HEATER,
CONF_HOT_TOLERANCE,
CONF_KEEP_ALIVE,
CONF_MAX_TEMP,
CONF_MIN_DUR,
CONF_MIN_TEMP,
@@ -82,6 +81,7 @@ _LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Generic Thermostat"

CONF_INITIAL_HVAC_MODE = "initial_hvac_mode"
CONF_KEEP_ALIVE = "keep_alive"
CONF_PRECISION = "precision"
CONF_TARGET_TEMP = "target_temp"
CONF_TEMP_STEP = "target_temp_step"

@@ -21,7 +21,6 @@ from .const import (
CONF_COLD_TOLERANCE,
CONF_HEATER,
CONF_HOT_TOLERANCE,
CONF_KEEP_ALIVE,
CONF_MAX_TEMP,
CONF_MIN_DUR,
CONF_MIN_TEMP,
@@ -60,9 +59,6 @@ OPTIONS_SCHEMA = {
vol.Optional(CONF_MIN_DUR): selector.DurationSelector(
selector.DurationSelectorConfig(allow_negative=False)
),
vol.Optional(CONF_KEEP_ALIVE): selector.DurationSelector(
selector.DurationSelectorConfig(allow_negative=False)
),
vol.Optional(CONF_MIN_TEMP): selector.NumberSelector(
selector.NumberSelectorConfig(
mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1

@@ -33,5 +33,4 @@ CONF_PRESETS = {
)
}
CONF_SENSOR = "target_sensor"
CONF_KEEP_ALIVE = "keep_alive"
DEFAULT_TOLERANCE = 0.3

@@ -18,7 +18,6 @@
"cold_tolerance": "Cold tolerance",
"heater": "Actuator switch",
"hot_tolerance": "Hot tolerance",
"keep_alive": "Keep-alive interval",
"max_temp": "Maximum target temperature",
"min_cycle_duration": "Minimum cycle duration",
"min_temp": "Minimum target temperature",
@@ -30,7 +29,6 @@
"cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor goes below 24.5.",
"heater": "Switch entity used to cool or heat depending on A/C mode.",
"hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5.",
"keep_alive": "Trigger the heater periodically to keep devices from losing state. When set, min cycle duration is ignored.",
"min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.",
"target_sensor": "Temperature sensor that reflects the current temperature."
},
@@ -47,7 +45,6 @@
"cold_tolerance": "[%key:component::generic_thermostat::config::step::user::data::cold_tolerance%]",
"heater": "[%key:component::generic_thermostat::config::step::user::data::heater%]",
"hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data::hot_tolerance%]",
"keep_alive": "[%key:component::generic_thermostat::config::step::user::data::keep_alive%]",
"max_temp": "[%key:component::generic_thermostat::config::step::user::data::max_temp%]",
"min_cycle_duration": "[%key:component::generic_thermostat::config::step::user::data::min_cycle_duration%]",
"min_temp": "[%key:component::generic_thermostat::config::step::user::data::min_temp%]",
@@ -58,7 +55,6 @@
"cold_tolerance": "[%key:component::generic_thermostat::config::step::user::data_description::cold_tolerance%]",
"heater": "[%key:component::generic_thermostat::config::step::user::data_description::heater%]",
"hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data_description::hot_tolerance%]",
"keep_alive": "[%key:component::generic_thermostat::config::step::user::data_description::keep_alive%]",
"min_cycle_duration": "[%key:component::generic_thermostat::config::step::user::data_description::min_cycle_duration%]",
"target_sensor": "[%key:component::generic_thermostat::config::step::user::data_description::target_sensor%]"
}

Some files were not shown because too many files have changed in this diff.