Mirror of https://github.com/home-assistant/core.git (synced 2026-01-03 14:25:28 +01:00)

Comparing 2 commits: config_sub...mill_devic
Commits: a1bfc46e6c, 9e41f7c9ba
.github/workflows/builder.yml (vendored, 2 changes)

@@ -69,7 +69,7 @@ jobs:
           run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
 
       - name: Upload translations
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: translations
           path: translations.tar.gz
.github/workflows/ci.yaml (vendored, 28 changes)

@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 11
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2025.2"
+  HA_SHORT_VERSION: "2025.1"
   DEFAULT_PYTHON: "3.12"
   ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
   # 10.3 is the oldest supported version
@@ -537,7 +537,7 @@ jobs:
           python --version
           uv pip freeze >> pip_freeze.txt
       - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: pip-freeze-${{ matrix.python-version }}
           path: pip_freeze.txt
@@ -661,7 +661,7 @@ jobs:
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
@@ -877,7 +877,7 @@ jobs:
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: pytest_buckets
           path: pytest_buckets.txt
@@ -979,14 +979,14 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
@@ -1106,7 +1106,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1114,7 +1114,7 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1236,7 +1236,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1244,7 +1244,7 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1273,7 +1273,7 @@ jobs:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.1.2
+        uses: codecov/codecov-action@v5.1.1
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1378,14 +1378,14 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
@@ -1411,7 +1411,7 @@ jobs:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.1.2
+        uses: codecov/codecov-action@v5.1.1
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2
 
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.0
+        uses: github/codeql-action/init@v3.27.9
         with:
           languages: python
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.0
+        uses: github/codeql-action/analyze@v3.27.9
         with:
           category: "/language:python"
.github/workflows/wheels.yml (vendored, 43 changes)

@@ -76,37 +76,18 @@ jobs:
 
           # Use C-Extension for SQLAlchemy
           echo "REQUIRE_SQLALCHEMY_CEXT=1"
-
-          # Add additional pip wheel build constraints
-          echo "PIP_CONSTRAINT=build_constraints.txt"
           ) > .env_file
 
-      - name: Write pip wheel build constraints
-        run: |
-          (
-            # ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
-            # this caused the numpy builds to fail
-            # https://github.com/scikit-build/ninja-python-distributions/issues/274
-            echo "ninja==1.11.1.1"
-          ) > build_constraints.txt
-
       - name: Upload env_file
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: env_file
           path: ./.env_file
           include-hidden-files: true
           overwrite: true
 
-      - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.5.0
-        with:
-          name: build_constraints
-          path: ./build_constraints.txt
-          overwrite: true
-
       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +99,7 @@ jobs:
           python -m script.gen_requirements_all ci
 
       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -142,11 +123,6 @@ jobs:
         with:
           name: env_file
 
-      - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.8
-        with:
-          name: build_constraints
-
       - name: Download requirements_diff
         uses: actions/download-artifact@v4.1.8
         with:
@@ -166,7 +142,7 @@ jobs:
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
-          apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-ng-dev"
+          apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
           skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
@@ -191,11 +167,6 @@ jobs:
         with:
           name: env_file
 
-      - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.8
-        with:
-          name: build_constraints
-
       - name: Download requirements_diff
         uses: actions/download-artifact@v4.1.8
         with:
@@ -234,7 +205,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
@@ -248,7 +219,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
@@ -262,7 +233,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
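The removed step above pins the wheel build by writing a constraints file and pointing pip at it through the PIP_CONSTRAINT environment variable, which pip also honors inside the isolated build environments it spawns. A small sketch of the same mechanism outside CI, assuming only documented pip behavior (the ninja pin is copied from the diff; installing requests is just an example target):

```python
import os
import subprocess
import sys

# Pin a build dependency exactly as the deleted workflow step did.
with open("build_constraints.txt", "w") as f:
    f.write("ninja==1.11.1.1\n")

# pip reads PIP_CONSTRAINT from the environment, including in build envs,
# so source builds triggered by this install see the same pin.
env = {**os.environ, "PIP_CONSTRAINT": "build_constraints.txt"}
subprocess.run(
    [sys.executable, "-m", "pip", "install", "requests"],
    env=env,
    check=True,
)
```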
.pre-commit-config.yaml

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.6
+    rev: v0.8.3
     hooks:
       - id: ruff
         args:
@@ -12,7 +12,7 @@ repos:
     hooks:
       - id: codespell
         args:
-          - --ignore-words-list=aiport,astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
+          - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
          - --skip="./.*,*.csv,*.json,*.ambr"
          - --quiet-level=2
        exclude_types: [csv, json, html]
.strict-typing

@@ -311,8 +311,6 @@ homeassistant.components.manual.*
 homeassistant.components.mastodon.*
 homeassistant.components.matrix.*
 homeassistant.components.matter.*
-homeassistant.components.mcp_server.*
 homeassistant.components.mealie.*
 homeassistant.components.media_extractor.*
 homeassistant.components.media_player.*
 homeassistant.components.media_source.*
@@ -363,11 +361,8 @@ homeassistant.components.openuv.*
 homeassistant.components.oralb.*
 homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
-homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
 homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
-homeassistant.components.peblar.*
 homeassistant.components.peco.*
 homeassistant.components.persistent_notification.*
 homeassistant.components.pi_hole.*
@@ -383,7 +378,6 @@ homeassistant.components.pure_energie.*
 homeassistant.components.purpleair.*
 homeassistant.components.pushbullet.*
 homeassistant.components.pvoutput.*
-homeassistant.components.python_script.*
 homeassistant.components.qnap_qsw.*
 homeassistant.components.rabbitair.*
 homeassistant.components.radarr.*
CODEOWNERS (35 changes)

@@ -578,8 +578,8 @@ build.json @home-assistant/supervisor
 /tests/components/google_tasks/ @allenporter
 /homeassistant/components/google_travel_time/ @eifinger
 /tests/components/google_travel_time/ @eifinger
-/homeassistant/components/govee_ble/ @bdraco
-/tests/components/govee_ble/ @bdraco
+/homeassistant/components/govee_ble/ @bdraco @PierreAronnax
+/tests/components/govee_ble/ @bdraco @PierreAronnax
 /homeassistant/components/govee_light_local/ @Galorhallen
 /tests/components/govee_light_local/ @Galorhallen
 /homeassistant/components/gpsd/ @fabaff @jrieger
@@ -637,8 +637,6 @@ build.json @home-assistant/supervisor
 /tests/components/homeassistant_sky_connect/ @home-assistant/core
 /homeassistant/components/homeassistant_yellow/ @home-assistant/core
 /tests/components/homeassistant_yellow/ @home-assistant/core
-/homeassistant/components/homee/ @Taraman17
-/tests/components/homee/ @Taraman17
 /homeassistant/components/homekit/ @bdraco
 /tests/components/homekit/ @bdraco
 /homeassistant/components/homekit_controller/ @Jc2k @bdraco
@@ -688,8 +686,6 @@ build.json @home-assistant/supervisor
 /tests/components/icloud/ @Quentame @nzapponi
 /homeassistant/components/idasen_desk/ @abmantis
 /tests/components/idasen_desk/ @abmantis
-/homeassistant/components/igloohome/ @keithle888
-/tests/components/igloohome/ @keithle888
 /homeassistant/components/ign_sismologia/ @exxamalte
 /tests/components/ign_sismologia/ @exxamalte
 /homeassistant/components/image/ @home-assistant/core
@@ -891,8 +887,6 @@ build.json @home-assistant/supervisor
 /tests/components/matrix/ @PaarthShah
 /homeassistant/components/matter/ @home-assistant/matter
 /tests/components/matter/ @home-assistant/matter
-/homeassistant/components/mcp_server/ @allenporter
-/tests/components/mcp_server/ @allenporter
 /homeassistant/components/mealie/ @joostlek @andrew-codechimp
 /tests/components/mealie/ @joostlek @andrew-codechimp
 /homeassistant/components/meater/ @Sotolotl @emontnemery
@@ -1072,8 +1066,8 @@ build.json @home-assistant/supervisor
 /tests/components/ondilo_ico/ @JeromeHXP
 /homeassistant/components/onewire/ @garbled1 @epenet
 /tests/components/onewire/ @garbled1 @epenet
-/homeassistant/components/onkyo/ @arturpragacz @eclair4151
-/tests/components/onkyo/ @arturpragacz @eclair4151
+/homeassistant/components/onkyo/ @arturpragacz
+/tests/components/onkyo/ @arturpragacz
 /homeassistant/components/onvif/ @hunterjm
 /tests/components/onvif/ @hunterjm
 /homeassistant/components/open_meteo/ @frenck
@@ -1109,10 +1103,8 @@ build.json @home-assistant/supervisor
 /tests/components/otbr/ @home-assistant/core
 /homeassistant/components/ourgroceries/ @OnFreund
 /tests/components/ourgroceries/ @OnFreund
-/homeassistant/components/overkiz/ @imicknl
-/tests/components/overkiz/ @imicknl
-/homeassistant/components/overseerr/ @joostlek
-/tests/components/overseerr/ @joostlek
+/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
+/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
 /homeassistant/components/ovo_energy/ @timmo001
 /tests/components/ovo_energy/ @timmo001
 /homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1121,8 +1113,6 @@ build.json @home-assistant/supervisor
 /tests/components/palazzetti/ @dotvav
 /homeassistant/components/panel_custom/ @home-assistant/frontend
 /tests/components/panel_custom/ @home-assistant/frontend
-/homeassistant/components/peblar/ @frenck
-/tests/components/peblar/ @frenck
 /homeassistant/components/peco/ @IceBotYT
 /tests/components/peco/ @IceBotYT
 /homeassistant/components/pegel_online/ @mib1185
@@ -1143,8 +1133,8 @@ build.json @home-assistant/supervisor
 /tests/components/plaato/ @JohNan
 /homeassistant/components/plex/ @jjlawren
 /tests/components/plex/ @jjlawren
-/homeassistant/components/plugwise/ @CoMPaTech @bouwew
-/tests/components/plugwise/ @CoMPaTech @bouwew
+/homeassistant/components/plugwise/ @CoMPaTech @bouwew @frenck
+/tests/components/plugwise/ @CoMPaTech @bouwew @frenck
 /homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
 /tests/components/plum_lightpad/ @ColinHarrington @prystupa
 /homeassistant/components/point/ @fredrike
@@ -1486,8 +1476,8 @@ build.json @home-assistant/supervisor
 /tests/components/system_bridge/ @timmo001
 /homeassistant/components/systemmonitor/ @gjohansson-ST
 /tests/components/systemmonitor/ @gjohansson-ST
-/homeassistant/components/tado/ @erwindouna
-/tests/components/tado/ @erwindouna
+/homeassistant/components/tado/ @chiefdragon @erwindouna
+/tests/components/tado/ @chiefdragon @erwindouna
 /homeassistant/components/tag/ @balloob @dmulcahey
 /tests/components/tag/ @balloob @dmulcahey
 /homeassistant/components/tailscale/ @frenck
@@ -1581,8 +1571,8 @@ build.json @home-assistant/supervisor
 /tests/components/triggercmd/ @rvmey
 /homeassistant/components/tts/ @home-assistant/core
 /tests/components/tts/ @home-assistant/core
-/homeassistant/components/tuya/ @Tuya @zlinoliver
-/tests/components/tuya/ @Tuya @zlinoliver
+/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
+/tests/components/tuya/ @Tuya @zlinoliver @frenck
 /homeassistant/components/twentemilieu/ @frenck
 /tests/components/twentemilieu/ @frenck
 /homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen
@@ -1750,7 +1740,6 @@ build.json @home-assistant/supervisor
 /tests/components/youless/ @gjong
 /homeassistant/components/youtube/ @joostlek
 /tests/components/youtube/ @joostlek
-/homeassistant/components/zabbix/ @kruton
 /homeassistant/components/zamg/ @killer0071234
 /tests/components/zamg/ @killer0071234
 /homeassistant/components/zengge/ @emontnemery
build.yaml (10 changes)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.0
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.1
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
homeassistant/backup_restore.py

@@ -64,9 +64,6 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
         )
     except (FileNotFoundError, KeyError, json.JSONDecodeError):
         return None
-    finally:
-        # Always remove the backup instruction file to prevent a boot loop
-        instruction_path.unlink(missing_ok=True)
 
 
 def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None:
homeassistant/bootstrap.py

@@ -89,7 +89,7 @@ from .helpers import (
 )
 from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
-from .helpers.system_info import async_get_system_info
+from .helpers.system_info import async_get_system_info, is_official_image
 from .helpers.typing import ConfigType
 from .setup import (
     # _setup_started is marked as protected to make it clear
@@ -106,7 +106,6 @@ from .util.async_ import create_eager_task
 from .util.hass_dict import HassKey
 from .util.logging import async_activate_log_queue_handler
 from .util.package import async_get_user_site, is_docker_env, is_virtual_env
-from .util.system_info import is_official_image
 
 with contextlib.suppress(ImportError):
     # Ensure anyio backend is imported to avoid it being imported in the event loop
@@ -253,7 +252,6 @@ PRELOAD_STORAGE = [
     "assist_pipeline.pipelines",
     "core.analytics",
     "auth_module.totp",
-    "backup",
 ]
homeassistant/brands/microsoft.json

@@ -2,7 +2,6 @@
   "domain": "microsoft",
   "name": "Microsoft",
   "integrations": [
-    "azure_data_explorer",
     "azure_devops",
     "azure_event_hub",
     "azure_service_bus",
homeassistant/components/abode/strings.json

@@ -34,17 +34,17 @@
   "services": {
     "capture_image": {
       "name": "Capture image",
-      "description": "Requests a new image capture from a camera device.",
+      "description": "Request a new image capture from a camera device.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Entity ID of the camera to request an image from."
+          "description": "Entity id of the camera to request an image."
         }
       }
     },
     "change_setting": {
       "name": "Change setting",
-      "description": "Changes an Abode system setting.",
+      "description": "Change an Abode system setting.",
       "fields": {
         "setting": {
           "name": "Setting",
@@ -58,11 +58,11 @@
     },
     "trigger_automation": {
       "name": "Trigger automation",
-      "description": "Triggers an Abode automation.",
+      "description": "Trigger an Abode automation.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Entity ID of the automation to trigger."
+          "description": "Entity id of the automation to trigger."
         }
       }
     }
homeassistant/components/acaia/manifest.json

@@ -26,5 +26,5 @@
   "iot_class": "local_push",
   "loggers": ["aioacaia"],
   "quality_scale": "platinum",
-  "requirements": ["aioacaia==0.1.13"]
+  "requirements": ["aioacaia==0.1.11"]
 }
homeassistant/components/aemet/__init__.py

@@ -1,7 +1,6 @@
 """The AEMET OpenData component."""
 
 import logging
-import shutil
 
 from aemet_opendata.exceptions import AemetError, TownNotFound
 from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature
@@ -11,9 +10,8 @@ from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import aiohttp_client
-from homeassistant.helpers.storage import STORAGE_DIR
 
-from .const import CONF_RADAR_UPDATES, CONF_STATION_UPDATES, DOMAIN, PLATFORMS
+from .const import CONF_STATION_UPDATES, PLATFORMS
 from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator
 
 _LOGGER = logging.getLogger(__name__)
@@ -26,15 +24,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool:
     latitude = entry.data[CONF_LATITUDE]
     longitude = entry.data[CONF_LONGITUDE]
     update_features: int = UpdateFeature.FORECAST
-    if entry.options.get(CONF_RADAR_UPDATES, False):
-        update_features |= UpdateFeature.RADAR
     if entry.options.get(CONF_STATION_UPDATES, True):
         update_features |= UpdateFeature.STATION
 
     options = ConnectionOptions(api_key, update_features)
     aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
-    aemet.set_api_data_dir(hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"))
 
     try:
         await aemet.select_coordinates(latitude, longitude)
     except TownNotFound as err:
@@ -63,11 +57,3 @@ async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None:
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-
-
-async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
-    """Remove a config entry."""
-    await hass.async_add_executor_job(
-        shutil.rmtree,
-        hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"),
-    )
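The update_features value removed above composes flag members with bitwise OR, so each optional data source toggles one bit. A minimal sketch of the pattern, assuming UpdateFeature behaves like a standard enum.IntFlag (the class below is a hypothetical stand-in, not aemet_opendata's actual definition):

```python
from enum import IntFlag, auto


class UpdateFeature(IntFlag):
    """Hypothetical stand-in for aemet_opendata's feature flags."""

    FORECAST = auto()
    RADAR = auto()
    STATION = auto()


# Start from the always-on feature, then OR in optional ones.
update_features = UpdateFeature.FORECAST
radar_enabled = True  # would come from entry.options in the integration
if radar_enabled:
    update_features |= UpdateFeature.RADAR

# Membership tests read naturally:
assert UpdateFeature.RADAR in update_features
assert UpdateFeature.STATION not in update_features
```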
homeassistant/components/aemet/config_flow.py

@@ -17,11 +17,10 @@ from homeassistant.helpers.schema_config_entry_flow import (
     SchemaOptionsFlowHandler,
 )
 
-from .const import CONF_RADAR_UPDATES, CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN
+from .const import CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN
 
 OPTIONS_SCHEMA = vol.Schema(
     {
-        vol.Required(CONF_RADAR_UPDATES, default=False): bool,
         vol.Required(CONF_STATION_UPDATES, default=True): bool,
     }
 )
homeassistant/components/aemet/const.py

@@ -51,9 +51,8 @@ from homeassistant.components.weather import (
 from homeassistant.const import Platform
 
 ATTRIBUTION = "Powered by AEMET OpenData"
-CONF_RADAR_UPDATES = "radar_updates"
 CONF_STATION_UPDATES = "station_updates"
-PLATFORMS = [Platform.IMAGE, Platform.SENSOR, Platform.WEATHER]
+PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
 DEFAULT_NAME = "AEMET"
 DOMAIN = "aemet"
homeassistant/components/aemet/diagnostics.py

@@ -4,7 +4,7 @@ from __future__ import annotations
 
 from typing import Any
 
-from aemet_opendata.const import AOD_COORDS, AOD_IMG_BYTES
+from aemet_opendata.const import AOD_COORDS
 
 from homeassistant.components.diagnostics import async_redact_data
 from homeassistant.const import (
@@ -26,7 +26,6 @@ TO_REDACT_CONFIG = [
 
 TO_REDACT_COORD = [
     AOD_COORDS,
-    AOD_IMG_BYTES,
 ]
homeassistant/components/aemet/image.py (deleted)

@@ -1,86 +0,0 @@
-"""Support for the AEMET OpenData images."""
-
-from __future__ import annotations
-
-from typing import Final
-
-from aemet_opendata.const import AOD_DATETIME, AOD_IMG_BYTES, AOD_IMG_TYPE, AOD_RADAR
-from aemet_opendata.helpers import dict_nested_value
-
-from homeassistant.components.image import Image, ImageEntity, ImageEntityDescription
-from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.entity_platform import AddEntitiesCallback
-
-from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
-from .entity import AemetEntity
-
-AEMET_IMAGES: Final[tuple[ImageEntityDescription, ...]] = (
-    ImageEntityDescription(
-        key=AOD_RADAR,
-        translation_key="weather_radar",
-    ),
-)
-
-
-async def async_setup_entry(
-    hass: HomeAssistant,
-    config_entry: AemetConfigEntry,
-    async_add_entities: AddEntitiesCallback,
-) -> None:
-    """Set up AEMET OpenData image entities based on a config entry."""
-    domain_data = config_entry.runtime_data
-    name = domain_data.name
-    coordinator = domain_data.coordinator
-
-    unique_id = config_entry.unique_id
-    assert unique_id is not None
-
-    async_add_entities(
-        AemetImage(
-            hass,
-            name,
-            coordinator,
-            description,
-            unique_id,
-        )
-        for description in AEMET_IMAGES
-        if dict_nested_value(coordinator.data["lib"], [description.key]) is not None
-    )
-
-
-class AemetImage(AemetEntity, ImageEntity):
-    """Implementation of an AEMET OpenData image."""
-
-    entity_description: ImageEntityDescription
-
-    def __init__(
-        self,
-        hass: HomeAssistant,
-        name: str,
-        coordinator: WeatherUpdateCoordinator,
-        description: ImageEntityDescription,
-        unique_id: str,
-    ) -> None:
-        """Initialize the image."""
-        super().__init__(coordinator, name, unique_id)
-        ImageEntity.__init__(self, hass)
-        self.entity_description = description
-        self._attr_unique_id = f"{unique_id}-{description.key}"
-
-        self._async_update_attrs()
-
-    @callback
-    def _handle_coordinator_update(self) -> None:
-        """Update attributes when the coordinator updates."""
-        self._async_update_attrs()
-        super()._handle_coordinator_update()
-
-    @callback
-    def _async_update_attrs(self) -> None:
-        """Update image attributes."""
-        image_data = self.get_aemet_value([self.entity_description.key])
-        self._cached_image = Image(
-            content_type=image_data.get(AOD_IMG_TYPE),
-            content=image_data.get(AOD_IMG_BYTES),
-        )
-        self._attr_image_last_updated = image_data.get(AOD_DATETIME)
homeassistant/components/aemet/manifest.json

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aemet",
   "iot_class": "cloud_polling",
   "loggers": ["aemet_opendata"],
-  "requirements": ["AEMET-OpenData==0.6.4"]
+  "requirements": ["AEMET-OpenData==0.6.3"]
 }
homeassistant/components/aemet/strings.json

@@ -18,18 +18,10 @@
       }
     }
   },
-  "entity": {
-    "image": {
-      "weather_radar": {
-        "name": "Weather radar"
-      }
-    }
-  },
   "options": {
     "step": {
       "init": {
         "data": {
-          "radar_updates": "Gather data from AEMET weather radar",
           "station_updates": "Gather data from AEMET weather stations"
         }
       }
homeassistant/components/airthings/quality_scale.yaml

@@ -31,9 +31,7 @@ rules:
   # Silver
   action-exceptions: todo
   config-entry-unloading: done
-  docs-configuration-parameters:
-    status: exempt
-    comment: No options to configure
+  docs-configuration-parameters: todo
   docs-installation-parameters: todo
   entity-unavailable: done
   integration-owner: done
@@ -43,16 +41,12 @@ rules:
     status: exempt
     comment: |
       This integration does not require authentication.
-  test-coverage: todo
+  test-coverage: done
   # Gold
   devices: done
   diagnostics: done
-  discovery-update-info:
-    status: todo
-    comment: DHCP is still possible
-  discovery:
-    status: todo
-    comment: DHCP is still possible
+  discovery-update-info: done
+  discovery: done
   docs-data-update: todo
   docs-examples: todo
   docs-known-limitations: todo
homeassistant/components/airthings/sensor.py

@@ -39,54 +39,45 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="temp",
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "humidity": SensorEntityDescription(
         key="humidity",
         device_class=SensorDeviceClass.HUMIDITY,
         native_unit_of_measurement=PERCENTAGE,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pressure": SensorEntityDescription(
         key="pressure",
         device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
         native_unit_of_measurement=UnitOfPressure.MBAR,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "battery": SensorEntityDescription(
         key="battery",
         device_class=SensorDeviceClass.BATTERY,
         native_unit_of_measurement=PERCENTAGE,
         entity_category=EntityCategory.DIAGNOSTIC,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "co2": SensorEntityDescription(
         key="co2",
         device_class=SensorDeviceClass.CO2,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "voc": SensorEntityDescription(
         key="voc",
         device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "light": SensorEntityDescription(
         key="light",
         native_unit_of_measurement=PERCENTAGE,
         translation_key="light",
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "virusRisk": SensorEntityDescription(
         key="virusRisk",
         translation_key="virus_risk",
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "mold": SensorEntityDescription(
         key="mold",
         translation_key="mold",
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "rssi": SensorEntityDescription(
         key="rssi",
@@ -94,19 +85,16 @@ SENSORS: dict[str, SensorEntityDescription] = {
         device_class=SensorDeviceClass.SIGNAL_STRENGTH,
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.DIAGNOSTIC,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pm1": SensorEntityDescription(
         key="pm1",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM1,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pm25": SensorEntityDescription(
         key="pm25",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM25,
-        state_class=SensorStateClass.MEASUREMENT,
     ),
 }
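Each deleted line attaches SensorStateClass.MEASUREMENT, which is what opts a numeric sensor into Home Assistant's long-term statistics. A minimal sketch of one description carrying it, with the key and unit chosen to mirror the diff:

```python
from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfTemperature

# MEASUREMENT marks the value as a point-in-time reading, enabling
# mean/min/max long-term statistics for the entity.
TEMP_DESCRIPTION = SensorEntityDescription(
    key="temp",
    device_class=SensorDeviceClass.TEMPERATURE,
    native_unit_of_measurement=UnitOfTemperature.CELSIUS,
    state_class=SensorStateClass.MEASUREMENT,
)
```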
homeassistant/components/amcrest/strings.json

@@ -41,7 +41,7 @@
       }
     },
     "enable_motion_recording": {
-      "name": "Enable motion recording",
+      "name": "Enables motion recording",
       "description": "Enables recording a clip to camera storage when motion is detected.",
       "fields": {
         "entity_id": {
@@ -51,8 +51,8 @@
       }
     },
     "disable_motion_recording": {
-      "name": "Disable motion recording",
-      "description": "Disables recording a clip to camera storage when motion is detected.",
+      "name": "Disables motion recording",
+      "description": "Disable recording a clip to camera storage when motion is detected.",
       "fields": {
         "entity_id": {
           "name": "[%key:component::amcrest::services::enable_recording::fields::entity_id::name%]",
homeassistant/components/analytics_insights/config_flow.py

@@ -11,7 +11,12 @@ from python_homeassistant_analytics import (
 from python_homeassistant_analytics.models import IntegrationType
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
+from homeassistant.config_entries import (
+    ConfigEntry,
+    ConfigFlow,
+    ConfigFlowResult,
+    OptionsFlow,
+)
 from homeassistant.core import callback
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.selector import (
@@ -20,7 +25,6 @@ from homeassistant.helpers.selector import (
     SelectSelectorConfig,
 )
 
-from . import AnalyticsInsightsConfigEntry
 from .const import (
     CONF_TRACKED_ADDONS,
     CONF_TRACKED_CUSTOM_INTEGRATIONS,
@@ -42,7 +46,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
     @staticmethod
     @callback
     def async_get_options_flow(
-        config_entry: AnalyticsInsightsConfigEntry,
+        config_entry: ConfigEntry,
     ) -> HomeassistantAnalyticsOptionsFlowHandler:
         """Get the options flow for this handler."""
         return HomeassistantAnalyticsOptionsFlowHandler()
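The AnalyticsInsightsConfigEntry annotation dropped above is Home Assistant's typed-config-entry idiom: an alias that parameterizes ConfigEntry with the integration's runtime-data type so entry.runtime_data type-checks. A rough sketch of the pattern, with a hypothetical data class and field name (not the integration's real ones):

```python
from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry


@dataclass
class AnalyticsInsightsData:
    """Hypothetical runtime data held by the config entry."""

    tracked_integrations: list[str]


# Parameterizing ConfigEntry lets mypy infer entry.runtime_data's type.
type AnalyticsInsightsConfigEntry = ConfigEntry[AnalyticsInsightsData]


def tracked_count(entry: AnalyticsInsightsConfigEntry) -> int:
    """entry.runtime_data is typed as AnalyticsInsightsData here."""
    return len(entry.runtime_data.tracked_integrations)
```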
homeassistant/components/analytics_insights/manifest.json

@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["python_homeassistant_analytics"],
-  "requirements": ["python-homeassistant-analytics==0.8.1"],
+  "requirements": ["python-homeassistant-analytics==0.8.0"],
   "single_config_entry": true
 }
homeassistant/components/analytics_insights/quality_scale.yaml (deleted)

@@ -1,100 +0,0 @@
-rules:
-  # Bronze
-  action-setup:
-    status: exempt
-    comment: |
-      This integration does not provide additional actions.
-  appropriate-polling: done
-  brands: done
-  common-modules: done
-  config-flow-test-coverage: done
-  config-flow: done
-  dependency-transparency: done
-  docs-actions:
-    status: exempt
-    comment: |
-      This integration does not provide additional actions.
-  docs-high-level-description: todo
-  docs-installation-instructions: todo
-  docs-removal-instructions: todo
-  entity-event-setup:
-    status: exempt
-    comment: |
-      Entities of this integration does not explicitly subscribe to events.
-  entity-unique-id: done
-  has-entity-name: done
-  runtime-data: done
-  test-before-configure: done
-  test-before-setup: done
-  unique-config-entry: done
-
-  # Silver
-  action-exceptions:
-    status: exempt
-    comment: |
-      This integration does not provide actions.
-  config-entry-unloading: done
-  docs-configuration-parameters: todo
-  docs-installation-parameters: todo
-  entity-unavailable:
-    status: done
-    comment: |
-      The coordinator handles this.
-  integration-owner: done
-  log-when-unavailable:
-    status: done
-    comment: |
-      The coordinator handles this.
-  parallel-updates: todo
-  reauthentication-flow:
-    status: exempt
-    comment: |
-      This integration does not require authentication.
-  test-coverage: todo
-  # Gold
-  devices: done
-  diagnostics: todo
-  discovery-update-info:
-    status: exempt
-    comment: |
-      This integration is a cloud service and thus does not support discovery.
-  discovery:
-    status: exempt
-    comment: |
-      This integration is a cloud service and thus does not support discovery.
-  docs-data-update: todo
-  docs-examples: todo
-  docs-known-limitations: todo
-  docs-supported-devices: todo
-  docs-supported-functions: todo
-  docs-troubleshooting: todo
-  docs-use-cases: todo
-  dynamic-devices:
-    status: exempt
-    comment: |
-      This integration has a fixed single service.
-  entity-category: done
-  entity-device-class:
-    status: exempt
-    comment: |
-      This integration does not have entities with device classes.
-  entity-disabled-by-default: done
-  entity-translations: done
-  exception-translations: todo
-  icon-translations: done
-  reconfiguration-flow:
-    status: exempt
-    comment: All the options of this integration are managed via the options flow
-  repair-issues:
-    status: exempt
-    comment: |
-      This integration doesn't have any cases where raising an issue is needed.
-  stale-devices:
-    status: exempt
-    comment: |
-      This integration has a fixed single service.
-
-  # Platinum
-  async-dependency: done
-  inject-websession: done
-  strict-typing: done
homeassistant/components/androidtv/strings.json

@@ -21,7 +21,7 @@
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-      "invalid_unique_id": "Impossible to determine a valid unique ID for the device"
+      "invalid_unique_id": "Impossible to determine a valid unique id for the device"
     }
   },
   "options": {
@@ -38,17 +38,17 @@
       }
     },
     "apps": {
-      "title": "Configure Android apps",
-      "description": "Configure application ID {app_id}",
+      "title": "Configure Android Apps",
+      "description": "Configure application id {app_id}",
       "data": {
-        "app_name": "Application name",
+        "app_name": "Application Name",
         "app_id": "Application ID",
         "app_delete": "Check to delete this application"
       }
     },
     "rules": {
       "title": "Configure Android state detection rules",
-      "description": "Configure detection rule for application ID {rule_id}",
+      "description": "Configure detection rule for application id {rule_id}",
       "data": {
         "rule_id": "[%key:component::androidtv::options::step::apps::data::app_id%]",
         "rule_values": "List of state detection rules (see documentation)",
homeassistant/components/androidtv_remote/config_flow.py

@@ -156,12 +156,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
         # and one of them, which could end up being in discovery_info.host, is from a
         # different device. If any of the discovery_info.ip_addresses matches the
         # existing host, don't update the host.
-        if (
-            existing_config_entry
-            # Ignored entries don't have host
-            and CONF_HOST in existing_config_entry.data
-            and len(discovery_info.ip_addresses) > 1
-        ):
+        if existing_config_entry and len(discovery_info.ip_addresses) > 1:
             existing_host = existing_config_entry.data[CONF_HOST]
             if existing_host != self.host:
                 if existing_host in [
homeassistant/components/androidtv_remote/strings.json

@@ -44,12 +44,12 @@
       }
     },
     "apps": {
-      "title": "Configure Android apps",
-      "description": "Configure application ID {app_id}",
+      "title": "Configure Android Apps",
+      "description": "Configure application id {app_id}",
       "data": {
-        "app_name": "Application name",
+        "app_name": "Application Name",
         "app_id": "Application ID",
-        "app_icon": "Application icon",
+        "app_icon": "Application Icon",
         "app_delete": "Check to delete this application"
       }
     }
homeassistant/components/apple_tv/config_flow.py

@@ -98,6 +98,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     VERSION = 1
 
+    scan_filter: str | None = None
     all_identifiers: set[str]
     atv: BaseConfig | None = None
     atv_identifiers: list[str] | None = None
     _host: str  # host in zeroconf discovery info, should not be accessed by other flows
@@ -117,7 +118,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     def __init__(self) -> None:
         """Initialize a new AppleTVConfigFlow."""
         self.credentials: dict[int, str | None] = {}  # Protocol -> credentials
-        self.all_identifiers: set[str] = set()
 
     @property
     def device_identifier(self) -> str | None:
homeassistant/components/apprise/manifest.json

@@ -6,5 +6,5 @@
   "iot_class": "cloud_push",
   "loggers": ["apprise"],
   "quality_scale": "legacy",
-  "requirements": ["apprise==1.9.1"]
+  "requirements": ["apprise==1.9.0"]
 }
homeassistant/components/aprilaire/coordinator.py

@@ -120,8 +120,6 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
         """Wait for the client to be ready."""
 
-        if not self.data or Attribute.MAC_ADDRESS not in self.data:
-            await self.client.read_mac_address()
 
         data = await self.client.wait_for_response(
             FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
         )
@@ -132,9 +130,12 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
 
             return False
 
-        if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
-            await self.client.read_thermostat_iaq_available()
+        if not self.data or Attribute.NAME not in self.data:
+            await self.client.wait_for_response(
+                FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
+            )
+
         if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
             await self.client.wait_for_response(
                 FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
             )
@@ -143,16 +144,10 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
             not self.data
             or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
         ):
-            await self.client.read_sensors()
-
             await self.client.wait_for_response(
                 FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
             )
 
-        await self.client.read_thermostat_status()
-
-        await self.client.read_iaq_status()
-
         await ready_callback(True)
 
         return True
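The readiness check above pairs read_*() requests with wait_for_response(domain, attribute, timeout) calls that block until a matching frame arrives. A simplified sketch of that request/await pattern using plain asyncio rather than pyaprilaire's real client (everything beyond the method names visible in the diff is hypothetical):

```python
import asyncio


class SketchClient:
    """Toy model of a request/response protocol client."""

    def __init__(self) -> None:
        self._responses: dict[tuple[int, int], asyncio.Future] = {}

    async def wait_for_response(self, domain: int, attribute: int, timeout: float):
        """Await the next response matching (domain, attribute); None on timeout."""
        future = self._responses.setdefault((domain, attribute), asyncio.Future())
        try:
            return await asyncio.wait_for(asyncio.shield(future), timeout)
        except TimeoutError:
            return None

    def _on_response(self, domain: int, attribute: int, data: dict) -> None:
        """Called by the transport when a frame arrives; resolves any waiter."""
        future = self._responses.pop((domain, attribute), None)
        if future and not future.done():
            future.set_result(data)
```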
homeassistant/components/aprilaire/manifest.json

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyaprilaire"],
-  "requirements": ["pyaprilaire==0.7.7"]
+  "requirements": ["pyaprilaire==0.7.4"]
 }
homeassistant/components/apsystems/coordinator.py

@@ -29,8 +29,6 @@ class ApSystemsSensorData:
 class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
     """Coordinator used for all sensors."""
 
-    device_version: str
-
     def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None:
         """Initialize my coordinator."""
         super().__init__(
@@ -48,7 +46,6 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
             raise UpdateFailed from None
         self.api.max_power = device_info.maxPower
         self.api.min_power = device_info.minPower
-        self.device_version = device_info.devVer
 
     async def _async_update_data(self) -> ApSystemsSensorData:
         try:
homeassistant/components/apsystems/entity.py

@@ -21,8 +21,7 @@ class ApSystemsEntity(Entity):
         """Initialize the APsystems entity."""
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, data.device_id)},
-            serial_number=data.device_id,
             manufacturer="APsystems",
             model="EZ1-M",
-            sw_version=data.coordinator.device_version.split(" ")[1],
+            serial_number=data.device_id,
         )
homeassistant/components/apsystems/number.py

@@ -2,8 +2,6 @@
 
 from __future__ import annotations
 
-from aiohttp import ClientConnectorError
-
 from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode
 from homeassistant.const import UnitOfPower
 from homeassistant.core import HomeAssistant
@@ -22,7 +20,7 @@ async def async_setup_entry(
 ) -> None:
     """Set up the sensor platform."""
 
-    add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)], True)
+    add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)])
 
 
 class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
@@ -47,13 +45,7 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
 
     async def async_update(self) -> None:
         """Set the state with the value fetched from the inverter."""
-        try:
-            status = await self._api.get_max_power()
-        except (TimeoutError, ClientConnectorError):
-            self._attr_available = False
-        else:
-            self._attr_available = True
-            self._attr_native_value = status
+        self._attr_native_value = await self._api.get_max_power()
 
     async def async_set_native_value(self, value: float) -> None:
         """Set the desired output power."""
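The try/except removed above is the common polling-entity availability pattern: flip _attr_available instead of letting the update raise. A condensed sketch of the idea with generic names, not tied to the APsystems API:

```python
from aiohttp import ClientConnectorError


class PolledNumber:
    """Skeleton showing the availability toggle used by polled entities."""

    _attr_available = True
    _attr_native_value: float | None = None

    async def async_update(self) -> None:
        try:
            value = await self._fetch()  # hypothetical helper doing the I/O
        except (TimeoutError, ClientConnectorError):
            # Mark unavailable but keep the last known value.
            self._attr_available = False
        else:
            self._attr_available = True
            self._attr_native_value = value

    async def _fetch(self) -> float:
        raise NotImplementedError
```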
homeassistant/components/aranet/manifest.json

@@ -19,5 +19,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aranet",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["aranet4==2.5.0"]
+  "requirements": ["aranet4==2.4.0"]
 }
homeassistant/components/aranet/sensor.py

@@ -22,7 +22,6 @@ from homeassistant.components.sensor import (
 )
 from homeassistant.const import (
-    ATTR_MANUFACTURER,
     ATTR_MODEL,
     ATTR_NAME,
     ATTR_SW_VERSION,
     CONCENTRATION_PARTS_PER_MILLION,
@@ -143,7 +142,6 @@ def _sensor_device_info_to_hass(
     if adv.readings and adv.readings.name:
         hass_device_info[ATTR_NAME] = adv.readings.name
-    hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME
     hass_device_info[ATTR_MODEL] = adv.readings.type.model
     if adv.manufacturer_data:
         hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version)
     return hass_device_info
homeassistant/components/aruba/device_tracker.py

@@ -90,7 +90,7 @@ class ArubaDeviceScanner(DeviceScanner):
         """Retrieve data from Aruba Access Point and return parsed result."""
 
         connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
-        ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
+        ssh = pexpect.spawn(connect)
         query = ssh.expect(
             [
                 "password:",
@@ -125,12 +125,12 @@ class ArubaDeviceScanner(DeviceScanner):
         ssh.expect("#")
         ssh.sendline("show clients")
         ssh.expect("#")
-        devices_result = (ssh.before or "").splitlines()
+        devices_result = ssh.before.split(b"\r\n")
         ssh.sendline("exit")
 
         devices: dict[str, dict[str, str]] = {}
         for device in devices_result:
-            if match := _DEVICES_REGEX.search(device):
+            if match := _DEVICES_REGEX.search(device.decode("utf-8")):
                 devices[match.group("ip")] = {
                     "ip": match.group("ip"),
                     "mac": match.group("mac").upper(),
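The two sides of this hunk differ in pexpect's I/O mode: without an encoding, spawn buffers bytes (hence split(b"\r\n") and decode()), while encoding="utf-8" makes .before a str, so .splitlines() works directly. A minimal sketch of the contrast; the echo command is just a stand-in:

```python
import pexpect

# Bytes mode (default): .before is bytes | None.
child = pexpect.spawn("echo hello")
child.expect(pexpect.EOF)
lines_bytes = (child.before or b"").split(b"\r\n")

# Text mode: .before is str | None, so normal str methods apply.
child = pexpect.spawn("echo hello", encoding="utf-8")
child.expect(pexpect.EOF)
lines_text = (child.before or "").splitlines()
```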
homeassistant/components/aruba/manifest.json

@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["pexpect", "ptyprocess"],
   "quality_scale": "legacy",
-  "requirements": ["pexpect==4.9.0"]
+  "requirements": ["pexpect==4.6.0"]
 }
homeassistant/components/assist_pipeline/__init__.py

@@ -108,7 +108,6 @@ async def async_pipeline_from_audio_stream(
     device_id: str | None = None,
     start_stage: PipelineStage = PipelineStage.STT,
     end_stage: PipelineStage = PipelineStage.TTS,
-    conversation_extra_system_prompt: str | None = None,
 ) -> None:
     """Create an audio pipeline from an audio stream.
 
@@ -120,7 +119,6 @@ async def async_pipeline_from_audio_stream(
         stt_metadata=stt_metadata,
         stt_stream=stt_stream,
         wake_word_phrase=wake_word_phrase,
-        conversation_extra_system_prompt=conversation_extra_system_prompt,
         run=PipelineRun(
             hass,
             context=context,

homeassistant/components/assist_pipeline/pipeline.py

@@ -1010,11 +1010,7 @@ class PipelineRun:
         self.intent_agent = agent_info.id
 
     async def recognize_intent(
-        self,
-        intent_input: str,
-        conversation_id: str | None,
-        device_id: str | None,
-        conversation_extra_system_prompt: str | None,
+        self, intent_input: str, conversation_id: str | None, device_id: str | None
     ) -> str:
         """Run intent recognition portion of pipeline. Returns text to speak."""
         if self.intent_agent is None:
@@ -1049,7 +1045,6 @@ class PipelineRun:
             device_id=device_id,
             language=input_language,
             agent_id=self.intent_agent,
-            extra_system_prompt=conversation_extra_system_prompt,
         )
         processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT
 
@@ -1397,13 +1392,8 @@ class PipelineInput:
     """Input for text-to-speech. Required when start_stage = tts."""
 
     conversation_id: str | None = None
     """Identifier for the conversation."""
 
-    conversation_extra_system_prompt: str | None = None
-    """Extra prompt information for the conversation agent."""
-
     device_id: str | None = None
     """Identifier of the device that is processing the input/output of the pipeline."""
 
     async def execute(self) -> None:
         """Run pipeline."""
@@ -1493,7 +1483,6 @@ class PipelineInput:
                 intent_input,
                 self.conversation_id,
                 self.device_id,
-                self.conversation_extra_system_prompt,
             )
             if tts_input.strip():
                 current_stage = PipelineStage.TTS

homeassistant/components/assist_pipeline/vad.py

@@ -75,7 +75,7 @@ class AudioBuffer:
 class VoiceCommandSegmenter:
     """Segments an audio stream into voice commands."""
 
-    speech_seconds: float = 0.3
+    speech_seconds: float = 0.1
     """Seconds of speech before voice command has started."""
 
     command_seconds: float = 1.0
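speech_seconds is the debounce before the segmenter decides a voice command has started: chunks classified as speech must accumulate past that threshold. A toy sketch of the gating logic under the assumption of fixed 30 ms chunks (the real segmenter tracks more state than this):

```python
def command_started(
    is_speech_flags: list[bool],
    speech_seconds: float,
    chunk_seconds: float = 0.03,
) -> bool:
    """Return True once consecutive speech chunks exceed speech_seconds."""
    run = 0.0
    for is_speech in is_speech_flags:
        run = run + chunk_seconds if is_speech else 0.0
        if run >= speech_seconds:
            return True
    return False


# With speech_seconds=0.1, about four consecutive 30 ms speech chunks
# suffice; at 0.3 it takes about ten, so short noise bursts trigger less.
print(command_started([True] * 4, speech_seconds=0.1))  # True
print(command_started([True] * 4, speech_seconds=0.3))  # False
```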
homeassistant/components/asuswrt/strings.json

@@ -31,8 +31,8 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "invalid_unique_id": "Impossible to determine a valid unique ID for the device",
-      "no_unique_id": "A device without a valid unique ID is already configured. Configuration of multiple instances is not possible"
+      "invalid_unique_id": "Impossible to determine a valid unique id for the device",
+      "no_unique_id": "A device without a valid unique id is already configured. Configuration of multiple instance is not possible"
     }
   },
   "options": {
@@ -42,7 +42,7 @@
       "consider_home": "Seconds to wait before considering a device away",
       "track_unknown": "Track unknown / unnamed devices",
      "interface": "The interface that you want statistics from (e.g. eth0, eth1 etc)",
-      "dnsmasq": "The location of the dnsmasq.leases file in the router",
+      "dnsmasq": "The location in the router of the dnsmasq.leases files",
      "require_ip": "Devices must have IP (for access point mode)"
     }
   }
homeassistant/components/august/manifest.json

@@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"]
+  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"]
 }
homeassistant/components/backup/__init__.py

@@ -5,10 +5,6 @@ from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.typing import ConfigType
 
-# Pre-import backup to avoid it being imported
-# later when the import executor is busy and delaying
-# startup
-from . import backup  # noqa: F401
 from .agent import (
     BackupAgent,
     BackupAgentError,
@@ -21,10 +17,8 @@ from .manager import (
     BackupManager,
     BackupPlatformProtocol,
     BackupReaderWriter,
-    BackupReaderWriterError,
     CoreBackupReaderWriter,
-    CreateBackupEvent,
     IncorrectPasswordError,
     ManagerBackup,
     NewBackup,
     WrittenBackup,
@@ -41,10 +35,8 @@ __all__ = [
     "BackupAgentPlatformProtocol",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
-    "BackupReaderWriterError",
-    "CreateBackupEvent",
     "Folder",
     "IncorrectPasswordError",
     "LocalBackupAgent",
     "NewBackup",
     "WrittenBackup",
||||
@@ -17,7 +17,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util

from .const import LOGGER
from .models import BackupManagerError, Folder
from .models import Folder

if TYPE_CHECKING:
    from .manager import BackupManager, ManagerBackup
@@ -33,8 +33,8 @@ class StoredBackupConfig(TypedDict):
    """Represent the stored backup config."""

    create_backup: StoredCreateBackupConfig
    last_attempted_automatic_backup: str | None
    last_completed_automatic_backup: str | None
    last_attempted_strategy_backup: str | None
    last_completed_strategy_backup: str | None
    retention: StoredRetentionConfig
    schedule: StoredBackupSchedule

@@ -44,8 +44,8 @@ class BackupConfigData:
    """Represent loaded backup config data."""

    create_backup: CreateBackupConfig
    last_attempted_automatic_backup: datetime | None = None
    last_completed_automatic_backup: datetime | None = None
    last_attempted_strategy_backup: datetime | None = None
    last_completed_strategy_backup: datetime | None = None
    retention: RetentionConfig
    schedule: BackupSchedule

@@ -59,12 +59,12 @@ class BackupConfigData:
            include_folders = None
        retention = data["retention"]

        if last_attempted_str := data["last_attempted_automatic_backup"]:
        if last_attempted_str := data["last_attempted_strategy_backup"]:
            last_attempted = dt_util.parse_datetime(last_attempted_str)
        else:
            last_attempted = None

        if last_attempted_str := data["last_completed_automatic_backup"]:
        if last_attempted_str := data["last_completed_strategy_backup"]:
            last_completed = dt_util.parse_datetime(last_attempted_str)
        else:
            last_completed = None
@@ -79,8 +79,8 @@ class BackupConfigData:
                name=data["create_backup"]["name"],
                password=data["create_backup"]["password"],
            ),
            last_attempted_automatic_backup=last_attempted,
            last_completed_automatic_backup=last_completed,
            last_attempted_strategy_backup=last_attempted,
            last_completed_strategy_backup=last_completed,
            retention=RetentionConfig(
                copies=retention["copies"],
                days=retention["days"],
@@ -90,20 +90,20 @@ class BackupConfigData:

    def to_dict(self) -> StoredBackupConfig:
        """Convert backup config data to a dict."""
        if self.last_attempted_automatic_backup:
            last_attempted = self.last_attempted_automatic_backup.isoformat()
        if self.last_attempted_strategy_backup:
            last_attempted = self.last_attempted_strategy_backup.isoformat()
        else:
            last_attempted = None

        if self.last_completed_automatic_backup:
            last_completed = self.last_completed_automatic_backup.isoformat()
        if self.last_completed_strategy_backup:
            last_completed = self.last_completed_strategy_backup.isoformat()
        else:
            last_completed = None

        return StoredBackupConfig(
            create_backup=self.create_backup.to_dict(),
            last_attempted_automatic_backup=last_attempted,
            last_completed_automatic_backup=last_completed,
            last_attempted_strategy_backup=last_attempted,
            last_completed_strategy_backup=last_completed,
            retention=self.retention.to_dict(),
            schedule=self.schedule.to_dict(),
        )
@@ -124,7 +124,6 @@ class BackupConfig:
    def load(self, stored_config: StoredBackupConfig) -> None:
        """Load config."""
        self.data = BackupConfigData.from_dict(stored_config)
        self.data.retention.apply(self._manager)
        self.data.schedule.apply(self._manager)

    async def update(
@@ -161,13 +160,8 @@ class RetentionConfig:
    def apply(self, manager: BackupManager) -> None:
        """Apply backup retention configuration."""
        if self.days is not None:
            LOGGER.debug(
                "Scheduling next automatic delete of backups older than %s in 1 day",
                self.days,
            )
            self._schedule_next(manager)
        else:
            LOGGER.debug("Unscheduling next automatic delete")
            self._unschedule_next(manager)

    def to_dict(self) -> StoredRetentionConfig:
@@ -292,7 +286,7 @@ class BackupSchedule:
        self._unschedule_next(manager)
        now = dt_util.now()
        if (cron_event := self.cron_event) is None:
            seed_time = manager.config.data.last_completed_automatic_backup or now
            seed_time = manager.config.data.last_completed_strategy_backup or now
            cron_event = self.cron_event = CronSim(cron_pattern, seed_time)
        next_time = next(cron_event)

@@ -322,11 +316,11 @@ class BackupSchedule:
                include_homeassistant=True,  # always include HA
                name=config_data.create_backup.name,
                password=config_data.create_backup.password,
                with_automatic_settings=True,
                with_strategy_settings=True,
            )
        except BackupManagerError as err:
            LOGGER.error("Error creating backup: %s", err)
        except Exception:  # noqa: BLE001
            # another more specific exception will be added
            # and handled in the future
            LOGGER.exception("Unexpected error creating automatic backup")

        manager.remove_next_backup_event = async_track_point_in_time(
@@ -410,14 +404,14 @@ async def _delete_filtered_backups(
        get_agent_errors,
    )

    # only delete backups that are created with the saved automatic settings
    # only delete backups that are created by the backup strategy
    backups = {
        backup_id: backup
        for backup_id, backup in backups.items()
        if backup.with_automatic_settings
        if backup.with_strategy_settings
    }

    LOGGER.debug("Total automatic backups: %s", backups)
    LOGGER.debug("Total strategy backups: %s", backups)

    filtered_backups = backup_filter(backups)

@@ -473,7 +467,7 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N
        sorted(
            backups.items(),
            key=lambda backup_item: backup_item[1].date,
        )[: max(len(backups) - manager.config.data.retention.copies, 0)]
        )[: len(backups) - manager.config.data.retention.copies]
    )

    await _delete_filtered_backups(manager, _backups_filter)

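Note: the last hunk above swaps a clamped slice bound for an unclamped one. A quick pure-Python illustration of why the max(..., 0) clamp matters — with fewer backups than the configured number of retained copies, a negative slice bound still selects (and would delete) the oldest entries:

backups = ["backup_old", "backup_new"]  # 2 backups, sorted oldest first
copies = 3  # user wants to keep up to 3 copies

unclamped = backups[: len(backups) - copies]         # backups[:-1]
clamped = backups[: max(len(backups) - copies, 0)]   # backups[:0]

print(unclamped)  # ['backup_old'] -> one backup wrongly marked for deletion
print(clamped)    # []             -> nothing deleted, as intended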
@@ -23,11 +23,7 @@ from homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key
from homeassistant.const import __version__ as HAVERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import (
    instance_id,
    integration_platform,
    issue_registry as ir,
)
from homeassistant.helpers import integration_platform
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util

@@ -46,9 +42,9 @@ from .const import (
    EXCLUDE_FROM_BACKUP,
    LOGGER,
)
from .models import AgentBackup, BackupManagerError, Folder
from .models import AgentBackup, Folder
from .store import BackupStore
from .util import make_backup_dir, read_backup, validate_password
from .util import make_backup_dir, read_backup


@dataclass(frozen=True, kw_only=True, slots=True)
@@ -64,7 +60,7 @@ class ManagerBackup(AgentBackup):

    agent_ids: list[str]
    failed_agent_ids: list[str]
    with_automatic_settings: bool | None
    with_strategy_settings: bool


@dataclass(frozen=True, kw_only=True, slots=True)
@@ -204,7 +200,6 @@ class BackupReaderWriter(abc.ABC):
        *,
        agent_ids: list[str],
        backup_name: str,
        extra_metadata: dict[str, bool | str],
        include_addons: list[str] | None,
        include_all_addons: bool,
        include_database: bool,
@@ -241,14 +236,6 @@ class BackupReaderWriter(abc.ABC):
        """Restore a backup."""


class BackupReaderWriterError(HomeAssistantError):
    """Backup reader/writer error."""


class IncorrectPasswordError(BackupReaderWriterError):
    """Raised when the password is incorrect."""


class BackupManager:
    """Define the format that backup managers can have."""

@@ -377,9 +364,7 @@ class BackupManager:
        )
        for result in pre_backup_results:
            if isinstance(result, Exception):
                raise BackupManagerError(
                    f"Error during pre-backup: {result}"
                ) from result
                raise result

    async def async_post_backup_actions(self) -> None:
        """Perform post backup actions."""
@@ -392,9 +377,7 @@ class BackupManager:
        )
        for result in post_backup_results:
            if isinstance(result, Exception):
                raise BackupManagerError(
                    f"Error during post-backup: {result}"
                ) from result
                raise result

    async def load_platforms(self) -> None:
        """Load backup platforms."""
@@ -430,22 +413,11 @@ class BackupManager:
            return_exceptions=True,
        )
        for idx, result in enumerate(sync_backup_results):
            if isinstance(result, BackupReaderWriterError):
                # writer errors will affect all agents
                # no point in continuing
                raise BackupManagerError(str(result)) from result
            if isinstance(result, BackupAgentError):
                LOGGER.error("Error uploading to %s: %s", agent_ids[idx], result)
                agent_errors[agent_ids[idx]] = result
                continue
            if isinstance(result, Exception):
                # trap bugs from agents
                agent_errors[agent_ids[idx]] = result
                LOGGER.error("Unexpected error: %s", result, exc_info=result)
                continue
            if isinstance(result, BaseException):
                raise result

                LOGGER.exception(
                    "Error during backup upload - %s", result, exc_info=result
                )
        return agent_errors

    async def async_get_backups(
@@ -468,23 +440,21 @@ class BackupManager:
                agent_errors[agent_ids[idx]] = result
                continue
            if isinstance(result, BaseException):
                raise result  # unexpected error
                raise result
            for agent_backup in result:
                if (backup_id := agent_backup.backup_id) not in backups:
                    if known_backup := self.known_backups.get(backup_id):
                        failed_agent_ids = known_backup.failed_agent_ids
                        with_strategy_settings = known_backup.with_strategy_settings
                    else:
                        failed_agent_ids = []
                    with_automatic_settings = self.is_our_automatic_backup(
                        agent_backup, await instance_id.async_get(self.hass)
                    )
                        with_strategy_settings = False
                    backups[backup_id] = ManagerBackup(
                        agent_ids=[],
                        addons=agent_backup.addons,
                        backup_id=backup_id,
                        date=agent_backup.date,
                        database_included=agent_backup.database_included,
                        extra_metadata=agent_backup.extra_metadata,
                        failed_agent_ids=failed_agent_ids,
                        folders=agent_backup.folders,
                        homeassistant_included=agent_backup.homeassistant_included,
@@ -492,7 +462,7 @@ class BackupManager:
                        name=agent_backup.name,
                        protected=agent_backup.protected,
                        size=agent_backup.size,
                        with_automatic_settings=with_automatic_settings,
                        with_strategy_settings=with_strategy_settings,
                    )
                backups[backup_id].agent_ids.append(agent_ids[idx])

@@ -518,24 +488,22 @@ class BackupManager:
                agent_errors[agent_ids[idx]] = result
                continue
            if isinstance(result, BaseException):
                raise result  # unexpected error
                raise result
            if not result:
                continue
            if backup is None:
                if known_backup := self.known_backups.get(backup_id):
                    failed_agent_ids = known_backup.failed_agent_ids
                    with_strategy_settings = known_backup.with_strategy_settings
                else:
                    failed_agent_ids = []
                with_automatic_settings = self.is_our_automatic_backup(
                    result, await instance_id.async_get(self.hass)
                )
                    with_strategy_settings = False
                backup = ManagerBackup(
                    agent_ids=[],
                    addons=result.addons,
                    backup_id=result.backup_id,
                    date=result.date,
                    database_included=result.database_included,
                    extra_metadata=result.extra_metadata,
                    failed_agent_ids=failed_agent_ids,
                    folders=result.folders,
                    homeassistant_included=result.homeassistant_included,
@@ -543,28 +511,12 @@ class BackupManager:
                    name=result.name,
                    protected=result.protected,
                    size=result.size,
                    with_automatic_settings=with_automatic_settings,
                    with_strategy_settings=with_strategy_settings,
                )
            backup.agent_ids.append(agent_ids[idx])

        return (backup, agent_errors)

    @staticmethod
    def is_our_automatic_backup(
        backup: AgentBackup, our_instance_id: str
    ) -> bool | None:
        """Check if a backup was created by us and return automatic_settings flag.

        Returns `None` if the backup was not created by us, or if the
        automatic_settings flag is not a boolean.
        """
        if backup.extra_metadata.get("instance_id") != our_instance_id:
            return None
        with_automatic_settings = backup.extra_metadata.get("with_automatic_settings")
        if not isinstance(with_automatic_settings, bool):
            return None
        return with_automatic_settings

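Note: is_our_automatic_backup, shown in the hunk above, deliberately returns one of three values. A standalone sketch of that three-way result (the plain dicts below stand in for AgentBackup.extra_metadata):

def is_our_automatic_backup_sketch(extra: dict, our_instance_id: str) -> bool | None:
    if extra.get("instance_id") != our_instance_id:
        return None  # created by another instance, or of unknown origin
    flag = extra.get("with_automatic_settings")
    if not isinstance(flag, bool):
        return None  # metadata present but malformed
    return flag

assert is_our_automatic_backup_sketch({"instance_id": "other"}, "us") is None
assert is_our_automatic_backup_sketch(
    {"instance_id": "us", "with_automatic_settings": True}, "us"
) is True
assert is_our_automatic_backup_sketch(
    {"instance_id": "us", "with_automatic_settings": "yes"}, "us"
) is None  # non-boolean flag is treated as "not ours"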
    async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]:
        """Delete a backup."""
        agent_errors: dict[str, Exception] = {}
@@ -582,7 +534,7 @@ class BackupManager:
                agent_errors[agent_ids[idx]] = result
                continue
            if isinstance(result, BaseException):
                raise result  # unexpected error
                raise result

        if not agent_errors:
            self.known_backups.remove(backup_id)
@@ -597,7 +549,7 @@ class BackupManager:
    ) -> None:
        """Receive and store a backup file from upload."""
        if self.state is not BackupManagerState.IDLE:
            raise BackupManagerError(f"Backup manager busy: {self.state}")
            raise HomeAssistantError(f"Backup manager busy: {self.state}")
        self.async_on_backup_event(
            ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS)
        )
@@ -646,7 +598,7 @@ class BackupManager:
            open_stream=written_backup.open_stream,
        )
        await written_backup.release_stream()
        self.known_backups.add(written_backup.backup, agent_errors)
        self.known_backups.add(written_backup.backup, agent_errors, False)

    async def async_create_backup(
        self,
@@ -659,7 +611,7 @@ class BackupManager:
        include_homeassistant: bool,
        name: str | None,
        password: str | None,
        with_automatic_settings: bool = False,
        with_strategy_settings: bool = False,
    ) -> NewBackup:
        """Create a backup."""
        new_backup = await self.async_initiate_backup(
@@ -671,8 +623,7 @@ class BackupManager:
            include_homeassistant=include_homeassistant,
            name=name,
            password=password,
            raise_task_error=True,
            with_automatic_settings=with_automatic_settings,
            with_strategy_settings=with_strategy_settings,
        )
        assert self._backup_finish_task
        await self._backup_finish_task
@@ -689,15 +640,14 @@ class BackupManager:
        include_homeassistant: bool,
        name: str | None,
        password: str | None,
        raise_task_error: bool = False,
        with_automatic_settings: bool = False,
        with_strategy_settings: bool = False,
    ) -> NewBackup:
        """Initiate generating a backup."""
        if self.state is not BackupManagerState.IDLE:
            raise BackupManagerError(f"Backup manager busy: {self.state}")
            raise HomeAssistantError(f"Backup manager busy: {self.state}")

        if with_automatic_settings:
            self.config.data.last_attempted_automatic_backup = dt_util.now()
        if with_strategy_settings:
            self.config.data.last_attempted_strategy_backup = dt_util.now()
            self.store.save()

        self.async_on_backup_event(
@@ -713,16 +663,13 @@ class BackupManager:
                include_homeassistant=include_homeassistant,
                name=name,
                password=password,
                raise_task_error=raise_task_error,
                with_automatic_settings=with_automatic_settings,
                with_strategy_settings=with_strategy_settings,
            )
        except Exception:
            self.async_on_backup_event(
                CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
            )
            self.async_on_backup_event(IdleEvent())
            if with_automatic_settings:
                self._update_issue_backup_failed()
            raise

    async def _async_create_backup(
@@ -736,81 +683,48 @@ class BackupManager:
        include_homeassistant: bool,
        name: str | None,
        password: str | None,
        raise_task_error: bool,
        with_automatic_settings: bool,
        with_strategy_settings: bool,
    ) -> NewBackup:
        """Initiate generating a backup."""
        if not agent_ids:
            raise BackupManagerError("At least one agent must be selected")
        if invalid_agents := [
            agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
        ]:
            raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
            raise HomeAssistantError("At least one agent must be selected")
        if any(agent_id not in self.backup_agents for agent_id in agent_ids):
            raise HomeAssistantError("Invalid agent selected")
        if include_all_addons and include_addons:
            raise BackupManagerError(
            raise HomeAssistantError(
                "Cannot include all addons and specify specific addons"
            )

        backup_name = (
            name
            or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}"
        backup_name = name or f"Core {HAVERSION}"
        new_backup, self._backup_task = await self._reader_writer.async_create_backup(
            agent_ids=agent_ids,
            backup_name=backup_name,
            include_addons=include_addons,
            include_all_addons=include_all_addons,
            include_database=include_database,
            include_folders=include_folders,
            include_homeassistant=include_homeassistant,
            on_progress=self.async_on_backup_event,
            password=password,
        )

        try:
            (
                new_backup,
                self._backup_task,
            ) = await self._reader_writer.async_create_backup(
                agent_ids=agent_ids,
                backup_name=backup_name,
                extra_metadata={
                    "instance_id": await instance_id.async_get(self.hass),
                    "with_automatic_settings": with_automatic_settings,
                },
                include_addons=include_addons,
                include_all_addons=include_all_addons,
                include_database=include_database,
                include_folders=include_folders,
                include_homeassistant=include_homeassistant,
                on_progress=self.async_on_backup_event,
                password=password,
            )
        except BackupReaderWriterError as err:
            raise BackupManagerError(str(err)) from err

        backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
            self._async_finish_backup(agent_ids, with_automatic_settings),
        self._backup_finish_task = self.hass.async_create_task(
            self._async_finish_backup(agent_ids, with_strategy_settings),
            name="backup_manager_finish_backup",
        )
        if not raise_task_error:

            def log_finish_task_error(task: asyncio.Task[None]) -> None:
                if task.done() and not task.cancelled() and (err := task.exception()):
                    if isinstance(err, BackupManagerError):
                        LOGGER.error("Error creating backup: %s", err)
                    else:
                        LOGGER.error("Unexpected error: %s", err, exc_info=err)

            backup_finish_task.add_done_callback(log_finish_task_error)

        return new_backup

    async def _async_finish_backup(
        self, agent_ids: list[str], with_automatic_settings: bool
        self, agent_ids: list[str], with_strategy_settings: bool
    ) -> None:
        """Finish a backup."""
        if TYPE_CHECKING:
            assert self._backup_task is not None
        backup_success = False
        try:
            written_backup = await self._backup_task
        except Exception as err:
            if with_automatic_settings:
                self._update_issue_backup_failed()

            if isinstance(err, BackupReaderWriterError):
                raise BackupManagerError(str(err)) from err
            raise  # unexpected error
        except Exception as err:  # noqa: BLE001
            LOGGER.debug("Generating backup failed", exc_info=err)
            self.async_on_backup_event(
                CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
            )
        else:
            LOGGER.debug(
                "Generated new backup with backup_id %s, uploading to agents %s",
@@ -823,40 +737,29 @@ class BackupManager:
                    state=CreateBackupState.IN_PROGRESS,
                )
            )
            agent_errors = await self._async_upload_backup(
                backup=written_backup.backup,
                agent_ids=agent_ids,
                open_stream=written_backup.open_stream,
            )
            await written_backup.release_stream()
            if with_strategy_settings:
                # create backup was successful, update last_completed_strategy_backup
                self.config.data.last_completed_strategy_backup = dt_util.now()
                self.store.save()
            self.known_backups.add(
                written_backup.backup, agent_errors, with_strategy_settings
            )

            try:
                agent_errors = await self._async_upload_backup(
                    backup=written_backup.backup,
                    agent_ids=agent_ids,
                    open_stream=written_backup.open_stream,
                )
            finally:
                await written_backup.release_stream()
            self.known_backups.add(written_backup.backup, agent_errors)
            if not agent_errors:
                if with_automatic_settings:
                    # create backup was successful, update last_completed_automatic_backup
                    self.config.data.last_completed_automatic_backup = dt_util.now()
                    self.store.save()
                backup_success = True

            if with_automatic_settings:
                self._update_issue_after_agent_upload(agent_errors)
            # delete old backups more numerous than copies
            # try this regardless of agent errors above
            await delete_backups_exceeding_configured_count(self)

            self.async_on_backup_event(
                CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
            )
        finally:
            self._backup_task = None
            self._backup_finish_task = None
            self.async_on_backup_event(
                CreateBackupEvent(
                    stage=None,
                    state=CreateBackupState.COMPLETED
                    if backup_success
                    else CreateBackupState.FAILED,
                )
            )
            self.async_on_backup_event(IdleEvent())

    async def async_restore_backup(
@@ -872,7 +775,7 @@ class BackupManager:
    ) -> None:
        """Initiate restoring a backup."""
        if self.state is not BackupManagerState.IDLE:
            raise BackupManagerError(f"Backup manager busy: {self.state}")
            raise HomeAssistantError(f"Backup manager busy: {self.state}")

        self.async_on_backup_event(
            RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS)
@@ -887,9 +790,6 @@ class BackupManager:
                restore_folders=restore_folders,
                restore_homeassistant=restore_homeassistant,
            )
            self.async_on_backup_event(
                RestoreBackupEvent(stage=None, state=RestoreBackupState.COMPLETED)
            )
        except Exception:
            self.async_on_backup_event(
                RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED)
@@ -912,7 +812,7 @@ class BackupManager:
        """Initiate restoring a backup."""
        agent = self.backup_agents[agent_id]
        if not await agent.async_get_backup(backup_id):
            raise BackupManagerError(
            raise HomeAssistantError(
                f"Backup {backup_id} not found in agent {agent_id}"
            )

@@ -955,38 +855,6 @@ class BackupManager:
        self._backup_event_subscriptions.append(on_event)
        return remove_subscription

    def _update_issue_backup_failed(self) -> None:
        """Update issue registry when a backup fails."""
        ir.async_create_issue(
            self.hass,
            DOMAIN,
            "automatic_backup_failed",
            is_fixable=False,
            is_persistent=True,
            learn_more_url="homeassistant://config/backup",
            severity=ir.IssueSeverity.WARNING,
            translation_key="automatic_backup_failed_create",
        )

    def _update_issue_after_agent_upload(
        self, agent_errors: dict[str, Exception]
    ) -> None:
        """Update issue registry after a backup is uploaded to agents."""
        if not agent_errors:
            ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
            return
        ir.async_create_issue(
            self.hass,
            DOMAIN,
            "automatic_backup_failed",
            is_fixable=False,
            is_persistent=True,
            learn_more_url="homeassistant://config/backup",
            severity=ir.IssueSeverity.WARNING,
            translation_key="automatic_backup_failed_upload_agents",
            translation_placeholders={"failed_agents": ", ".join(agent_errors)},
        )


class KnownBackups:
    """Track known backups."""
@@ -1002,6 +870,7 @@ class KnownBackups:
            backup["backup_id"]: KnownBackup(
                backup_id=backup["backup_id"],
                failed_agent_ids=backup["failed_agent_ids"],
                with_strategy_settings=backup["with_strategy_settings"],
            )
            for backup in stored_backups
        }
@@ -1014,11 +883,13 @@ class KnownBackups:
        self,
        backup: AgentBackup,
        agent_errors: dict[str, Exception],
        with_strategy_settings: bool,
    ) -> None:
        """Add a backup."""
        self._backups[backup.backup_id] = KnownBackup(
            backup_id=backup.backup_id,
            failed_agent_ids=list(agent_errors),
            with_strategy_settings=with_strategy_settings,
        )
        self._manager.store.save()

@@ -1040,12 +911,14 @@ class KnownBackup:

    backup_id: str
    failed_agent_ids: list[str]
    with_strategy_settings: bool

    def to_dict(self) -> StoredKnownBackup:
        """Convert known backup to a dict."""
        return {
            "backup_id": self.backup_id,
            "failed_agent_ids": self.failed_agent_ids,
            "with_strategy_settings": self.with_strategy_settings,
        }


@@ -1054,6 +927,7 @@ class StoredKnownBackup(TypedDict):

    backup_id: str
    failed_agent_ids: list[str]
    with_strategy_settings: bool


class CoreBackupReaderWriter(BackupReaderWriter):
@@ -1071,7 +945,6 @@ class CoreBackupReaderWriter(BackupReaderWriter):
        *,
        agent_ids: list[str],
        backup_name: str,
        extra_metadata: dict[str, bool | str],
        include_addons: list[str] | None,
        include_all_addons: bool,
        include_database: bool,
@@ -1085,18 +958,17 @@ class CoreBackupReaderWriter(BackupReaderWriter):
        backup_id = _generate_backup_id(date_str, backup_name)

        if include_addons or include_all_addons or include_folders:
            raise BackupReaderWriterError(
            raise HomeAssistantError(
                "Addons and folders are not supported by core backup"
            )
        if not include_homeassistant:
            raise BackupReaderWriterError("Home Assistant must be included in backup")
            raise HomeAssistantError("Home Assistant must be included in backup")

        backup_task = self._hass.async_create_task(
            self._async_create_backup(
                agent_ids=agent_ids,
                backup_id=backup_id,
                backup_name=backup_name,
                extra_metadata=extra_metadata,
                include_database=include_database,
                date_str=date_str,
                on_progress=on_progress,
@@ -1115,7 +987,6 @@ class CoreBackupReaderWriter(BackupReaderWriter):
        backup_id: str,
        backup_name: str,
        date_str: str,
        extra_metadata: dict[str, bool | str],
        include_database: bool,
        on_progress: Callable[[ManagerStateEvent], None],
        password: str | None,
@@ -1141,7 +1012,6 @@ class CoreBackupReaderWriter(BackupReaderWriter):
        backup_data = {
            "compressed": True,
            "date": date_str,
            "extra": extra_metadata,
            "homeassistant": {
                "exclude_database": not include_database,
                "version": HAVERSION,
@@ -1160,19 +1030,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
                password,
                local_agent_tar_file_path,
            )
        except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err:
            # BackupManagerError from async_pre_backup_actions
            # OSError from file operations
            # TarError from tarfile
            # ValueError from json_bytes
            raise BackupReaderWriterError(str(err)) from err
        else:
            backup = AgentBackup(
                addons=[],
                backup_id=backup_id,
                database_included=include_database,
                date=date_str,
                extra_metadata=extra_metadata,
                folders=[],
                homeassistant_included=True,
                homeassistant_version=HAVERSION,
@@ -1184,15 +1046,12 @@ class CoreBackupReaderWriter(BackupReaderWriter):
            async_add_executor_job = self._hass.async_add_executor_job

            async def send_backup() -> AsyncIterator[bytes]:
                f = await async_add_executor_job(tar_file_path.open, "rb")
                try:
                    f = await async_add_executor_job(tar_file_path.open, "rb")
                    try:
                        while chunk := await async_add_executor_job(f.read, 2**20):
                            yield chunk
                    finally:
                        await async_add_executor_job(f.close)
                except OSError as err:
                    raise BackupReaderWriterError(str(err)) from err
                    while chunk := await async_add_executor_job(f.read, 2**20):
                        yield chunk
                finally:
                    await async_add_executor_job(f.close)

            async def open_backup() -> AsyncIterator[bytes]:
                return send_backup()
@@ -1200,20 +1059,14 @@ class CoreBackupReaderWriter(BackupReaderWriter):
            async def remove_backup() -> None:
                if local_agent_tar_file_path:
                    return
                try:
                    await async_add_executor_job(tar_file_path.unlink, True)
                except OSError as err:
                    raise BackupReaderWriterError(str(err)) from err
                await async_add_executor_job(tar_file_path.unlink, True)

            return WrittenBackup(
                backup=backup, open_stream=open_backup, release_stream=remove_backup
            )
        finally:
            # Inform integrations the backup is done
            try:
                await manager.async_post_backup_actions()
            except BackupManagerError as err:
                raise BackupReaderWriterError(str(err)) from err
            await manager.async_post_backup_actions()

    def _mkdir_and_generate_backup_contents(
        self,
@@ -1283,7 +1136,6 @@ class CoreBackupReaderWriter(BackupReaderWriter):
        if self._local_agent_id in agent_ids:
            local_agent = manager.local_backup_agents[self._local_agent_id]
            tar_file_path = local_agent.get_backup_path(backup.backup_id)
            await async_add_executor_job(make_backup_dir, tar_file_path.parent)
            await async_add_executor_job(shutil.move, temp_file, tar_file_path)
        else:
            tar_file_path = temp_file
@@ -1327,11 +1179,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
        """

        if restore_addons or restore_folders:
            raise BackupReaderWriterError(
            raise HomeAssistantError(
                "Addons and folders are not supported in core restore"
            )
        if not restore_homeassistant and not restore_database:
            raise BackupReaderWriterError(
            raise HomeAssistantError(
                "Home Assistant or database must be included in restore"
            )

@@ -1354,12 +1206,6 @@ class CoreBackupReaderWriter(BackupReaderWriter):

        remove_after_restore = True

        password_valid = await self._hass.async_add_executor_job(
            validate_password, path, password
        )
        if not password_valid:
            raise IncorrectPasswordError("The password provided is incorrect.")

        def _write_restore_file() -> None:
            """Write the restore file."""
            Path(self._hass.config.path(RESTORE_BACKUP_FILE)).write_text(
@@ -1376,7 +1222,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
            )

        await self._hass.async_add_executor_job(_write_restore_file)
        await self._hass.services.async_call("homeassistant", "restart", blocking=True)
        await self._hass.services.async_call("homeassistant", "restart", {})


def _generate_backup_id(date: str, name: str) -> str:

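Note: the send_backup() hunks above stream a tar archive in 1 MiB chunks, with every file operation pushed to an executor thread. A minimal sketch of that chunked, non-blocking read pattern outside Home Assistant (paths and chunk size here are illustrative):

import asyncio
from collections.abc import AsyncIterator
from pathlib import Path

async def stream_file(path: Path) -> AsyncIterator[bytes]:
    loop = asyncio.get_running_loop()
    # open/read/close are blocking calls, so each one runs in an executor
    # thread; the event loop only ever awaits the results.
    f = await loop.run_in_executor(None, path.open, "rb")
    try:
        while chunk := await loop.run_in_executor(None, f.read, 2**20):
            yield chunk  # hand out 1 MiB at a time
    finally:
        await loop.run_in_executor(None, f.close)

async def main() -> None:
    total = 0
    async for chunk in stream_file(Path(__file__)):
        total += len(chunk)
    print(f"streamed {total} bytes")

asyncio.run(main())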
@@ -6,8 +6,6 @@ from dataclasses import asdict, dataclass
from enum import StrEnum
from typing import Any, Self

from homeassistant.exceptions import HomeAssistantError


@dataclass(frozen=True, kw_only=True)
class AddonInfo:
@@ -35,7 +33,6 @@ class AgentBackup:
    backup_id: str
    date: str
    database_included: bool
    extra_metadata: dict[str, bool | str]
    folders: list[Folder]
    homeassistant_included: bool
    homeassistant_version: str | None  # None if homeassistant_included is False
@@ -47,12 +44,6 @@ class AgentBackup:
        """Return a dict representation of this backup."""
        return asdict(self)

    def as_frontend_json(self) -> dict:
        """Return a dict representation of this backup for sending to frontend."""
        return {
            key: val for key, val in asdict(self).items() if key != "extra_metadata"
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> Self:
        """Create an instance from a JSON serialization."""
@@ -61,7 +52,6 @@ class AgentBackup:
            backup_id=data["backup_id"],
            date=data["date"],
            database_included=data["database_included"],
            extra_metadata=data["extra_metadata"],
            folders=[Folder(folder) for folder in data["folders"]],
            homeassistant_included=data["homeassistant_included"],
            homeassistant_version=data["homeassistant_version"],
@@ -69,7 +59,3 @@ class AgentBackup:
            protected=data["protected"],
            size=data["size"],
        )


class BackupManagerError(HomeAssistantError):
    """Backup manager error."""

@@ -1,14 +1,4 @@
{
  "issues": {
    "automatic_backup_failed_create": {
      "title": "Automatic backup could not be created",
      "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    },
    "automatic_backup_failed_upload_agents": {
      "title": "Automatic backup could not be uploaded to the configured locations",
      "description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    }
  },
  "services": {
    "create": {
      "name": "Create backup",

@@ -9,13 +9,11 @@ import tarfile
from typing import cast

import aiohttp
from securetar import SecureTarFile

from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.util.json import JsonObjectType, json_loads_object

from .const import BUF_SIZE, LOGGER
from .const import BUF_SIZE
from .models import AddonInfo, AgentBackup, Folder


@@ -52,7 +50,6 @@ def read_backup(backup_path: Path) -> AgentBackup:
    if (
        homeassistant := cast(JsonObjectType, data.get("homeassistant"))
    ) and "version" in homeassistant:
        homeassistant_included = True
        homeassistant_version = cast(str, homeassistant["version"])
        database_included = not cast(
            bool, homeassistant.get("exclude_database", False)
@@ -63,7 +60,6 @@ def read_backup(backup_path: Path) -> AgentBackup:
        backup_id=cast(str, data["slug"]),
        database_included=database_included,
        date=cast(str, data["date"]),
        extra_metadata=cast(dict[str, bool | str], data.get("extra", {})),
        folders=folders,
        homeassistant_included=homeassistant_included,
        homeassistant_version=homeassistant_version,
@@ -73,39 +69,6 @@ def read_backup(backup_path: Path) -> AgentBackup:
    )


def validate_password(path: Path, password: str | None) -> bool:
    """Validate the password."""
    with tarfile.open(path, "r:", bufsize=BUF_SIZE) as backup_file:
        compressed = False
        ha_tar_name = "homeassistant.tar"
        try:
            ha_tar = backup_file.extractfile(ha_tar_name)
        except KeyError:
            compressed = True
            ha_tar_name = "homeassistant.tar.gz"
            try:
                ha_tar = backup_file.extractfile(ha_tar_name)
            except KeyError:
                LOGGER.error("No homeassistant.tar or homeassistant.tar.gz found")
                return False
        try:
            with SecureTarFile(
                path,  # Not used
                gzip=compressed,
                key=password_to_key(password) if password is not None else None,
                mode="r",
                fileobj=ha_tar,
            ):
                # If we can read the tar file, the password is correct
                return True
        except tarfile.ReadError:
            LOGGER.debug("Invalid password")
            return False
        except Exception:  # noqa: BLE001
            LOGGER.exception("Unexpected error validating password")
            return False


async def receive_file(
    hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path
) -> None:

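Note: the validate_password() helper removed in the hunk above is what lets a restore fail fast on a wrong password instead of aborting mid-restore. A hedged usage sketch — validate_password here refers to the function shown above, and the archive path is purely illustrative:

from pathlib import Path

backup_path = Path("/config/backups/abc123.tar")  # illustrative location
if not validate_password(backup_path, "hunter2"):
    # Mirrors the manager-side check that raises IncorrectPasswordError
    # before any restore work begins.
    raise ValueError("Incorrect backup password or unreadable archive")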
@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant, callback

from .config import ScheduleState
from .const import DATA_MANAGER, LOGGER
from .manager import IncorrectPasswordError, ManagerStateEvent
from .manager import ManagerStateEvent
from .models import Folder


@@ -25,7 +25,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
    websocket_api.async_register_command(hass, handle_details)
    websocket_api.async_register_command(hass, handle_info)
    websocket_api.async_register_command(hass, handle_create)
    websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
    websocket_api.async_register_command(hass, handle_create_with_strategy_settings)
    websocket_api.async_register_command(hass, handle_delete)
    websocket_api.async_register_command(hass, handle_restore)
    websocket_api.async_register_command(hass, handle_subscribe_events)
@@ -51,9 +51,9 @@ async def handle_info(
            "agent_errors": {
                agent_id: str(err) for agent_id, err in agent_errors.items()
            },
            "backups": [backup.as_frontend_json() for backup in backups.values()],
            "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
            "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
            "backups": list(backups.values()),
            "last_attempted_strategy_backup": manager.config.data.last_attempted_strategy_backup,
            "last_completed_strategy_backup": manager.config.data.last_completed_strategy_backup,
        },
    )

@@ -81,7 +81,7 @@ async def handle_details(
            "agent_errors": {
                agent_id: str(err) for agent_id, err in agent_errors.items()
            },
            "backup": backup.as_frontend_json() if backup else None,
            "backup": backup,
        },
    )

@@ -131,20 +131,16 @@ async def handle_restore(
    msg: dict[str, Any],
) -> None:
    """Restore a backup."""
    try:
        await hass.data[DATA_MANAGER].async_restore_backup(
            msg["backup_id"],
            agent_id=msg["agent_id"],
            password=msg.get("password"),
            restore_addons=msg.get("restore_addons"),
            restore_database=msg["restore_database"],
            restore_folders=msg.get("restore_folders"),
            restore_homeassistant=msg["restore_homeassistant"],
        )
    except IncorrectPasswordError:
        connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
    else:
        connection.send_result(msg["id"])
    await hass.data[DATA_MANAGER].async_restore_backup(
        msg["backup_id"],
        agent_id=msg["agent_id"],
        password=msg.get("password"),
        restore_addons=msg.get("restore_addons"),
        restore_database=msg["restore_database"],
        restore_folders=msg.get("restore_folders"),
        restore_homeassistant=msg["restore_homeassistant"],
    )
    connection.send_result(msg["id"])


@websocket_api.require_admin
@@ -185,11 +181,11 @@ async def handle_create(
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/generate_with_automatic_settings",
        vol.Required("type"): "backup/generate_with_strategy_settings",
    }
)
@websocket_api.async_response
async def handle_create_with_automatic_settings(
async def handle_create_with_strategy_settings(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
@@ -206,7 +202,7 @@ async def handle_create_with_automatic_settings(
        include_homeassistant=True,  # always include HA
        name=config_data.create_backup.name,
        password=config_data.create_backup.password,
        with_automatic_settings=True,
        with_strategy_settings=True,
    )
    connection.send_result(msg["id"], backup)

@@ -295,15 +291,11 @@ async def handle_config_info(
        vol.Required("type"): "backup/config/update",
        vol.Optional("create_backup"): vol.Schema(
            {
                vol.Optional("agent_ids"): vol.All([str], vol.Unique()),
                vol.Optional("include_addons"): vol.Any(
                    vol.All([str], vol.Unique()), None
                ),
                vol.Optional("agent_ids"): vol.All(list[str]),
                vol.Optional("include_addons"): vol.Any(list[str], None),
                vol.Optional("include_all_addons"): bool,
                vol.Optional("include_database"): bool,
                vol.Optional("include_folders"): vol.Any(
                    vol.All([vol.Coerce(Folder)], vol.Unique()), None
                ),
                vol.Optional("include_folders"): vol.Any([vol.Coerce(Folder)], None),
                vol.Optional("name"): vol.Any(str, None),
                vol.Optional("password"): vol.Any(str, None),
            },

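Note: the schema hunk above tightens vol.All(list[str]) into vol.All([str], vol.Unique()). A short sketch of what that buys in voluptuous — [str] validates a list of strings, and vol.Unique() additionally rejects duplicate entries (the agent IDs below are illustrative):

import voluptuous as vol

agent_ids = vol.All([str], vol.Unique())

print(agent_ids(["backup.local", "cloud.agent"]))  # passes, returned as-is
try:
    agent_ids(["backup.local", "backup.local"])    # duplicate entry
except vol.Invalid as err:
    print(f"rejected: {err}")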
@@ -34,7 +34,7 @@ class BangOlufsenData:

type BangOlufsenConfigEntry = ConfigEntry[BangOlufsenData]

PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER]
PLATFORMS = [Platform.MEDIA_PLAYER]


async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) -> bool:

@@ -79,7 +79,6 @@ class WebsocketNotification(StrEnum):
    """Enum for WebSocket notification types."""

    ACTIVE_LISTENING_MODE = "active_listening_mode"
    BUTTON = "button"
    PLAYBACK_ERROR = "playback_error"
    PLAYBACK_METADATA = "playback_metadata"
    PLAYBACK_PROGRESS = "playback_progress"
@@ -204,60 +203,14 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
        ),
    ]
)
# Map for storing compatibility of devices.

MODEL_SUPPORT_DEVICE_BUTTONS: Final[str] = "device_buttons"

MODEL_SUPPORT_MAP = {
    MODEL_SUPPORT_DEVICE_BUTTONS: (
        BangOlufsenModel.BEOLAB_8,
        BangOlufsenModel.BEOLAB_28,
        BangOlufsenModel.BEOSOUND_2,
        BangOlufsenModel.BEOSOUND_A5,
        BangOlufsenModel.BEOSOUND_A9,
        BangOlufsenModel.BEOSOUND_BALANCE,
        BangOlufsenModel.BEOSOUND_EMERGE,
        BangOlufsenModel.BEOSOUND_LEVEL,
        BangOlufsenModel.BEOSOUND_THEATRE,
    )
}

# Device events
BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"

# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
EVENT_TRANSLATION_MAP: dict[str, str] = {
    "shortPress (Release)": "short_press_release",
    "longPress (Timeout)": "long_press_timeout",
    "longPress (Release)": "long_press_release",
    "veryLongPress (Timeout)": "very_long_press_timeout",
    "veryLongPress (Release)": "very_long_press_release",
}

CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"

DEVICE_BUTTONS: Final[list[str]] = [
    "Bluetooth",
    "Microphone",
    "Next",
    "PlayPause",
    "Preset1",
    "Preset2",
    "Preset3",
    "Preset4",
    "Previous",
    "Volume",
]


DEVICE_BUTTON_EVENTS: Final[list[str]] = [
    "short_press_release",
    "long_press_timeout",
    "long_press_release",
    "very_long_press_timeout",
    "very_long_press_release",
]

# Beolink Converter NL/ML sources need to be transformed to upper case
BEOLINK_JOIN_SOURCES_TO_UPPER = (
    "aux_a",

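Note: EVENT_TRANSLATION_MAP in the hunk above exists because the device reports button states such as "shortPress (Release)", while Home Assistant translation keys must be snake_case. A small sketch of the lookup; the map literal is copied from the hunk above, and the KeyError remark is an assumption about unmapped names:

EVENT_TRANSLATION_MAP: dict[str, str] = {
    "shortPress (Release)": "short_press_release",
    "longPress (Timeout)": "long_press_timeout",
    "longPress (Release)": "long_press_release",
    "veryLongPress (Timeout)": "very_long_press_timeout",
    "veryLongPress (Release)": "very_long_press_release",
}

def translate(native_state: str) -> str:
    # A KeyError here would indicate a new, unmapped firmware event name;
    # the code in this diff indexes the map directly with no fallback.
    return EVENT_TRANSLATION_MAP[native_state]

assert translate("veryLongPress (Timeout)") == "very_long_press_timeout"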
@@ -1,76 +0,0 @@
"""Event entities for the Bang & Olufsen integration."""

from __future__ import annotations

from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BangOlufsenConfigEntry
from .const import (
    CONNECTION_STATUS,
    DEVICE_BUTTON_EVENTS,
    DEVICE_BUTTONS,
    MODEL_SUPPORT_DEVICE_BUTTONS,
    MODEL_SUPPORT_MAP,
    WebsocketNotification,
)
from .entity import BangOlufsenEntity


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: BangOlufsenConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Sensor entities from config entry."""

    if config_entry.data[CONF_MODEL] in MODEL_SUPPORT_MAP[MODEL_SUPPORT_DEVICE_BUTTONS]:
        async_add_entities(
            BangOlufsenButtonEvent(config_entry, button_type)
            for button_type in DEVICE_BUTTONS
        )


class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
    """Event class for Button events."""

    _attr_device_class = EventDeviceClass.BUTTON
    _attr_entity_registry_enabled_default = False
    _attr_event_types = DEVICE_BUTTON_EVENTS

    def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
        """Initialize Button."""
        super().__init__(config_entry, config_entry.runtime_data.client)

        self._attr_unique_id = f"{self._unique_id}_{button_type}"

        # Make the native button name Home Assistant compatible
        self._attr_translation_key = button_type.lower()

        self._button_type = button_type

    async def async_added_to_hass(self) -> None:
        """Listen to WebSocket button events."""
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{self._unique_id}_{CONNECTION_STATUS}",
                self._async_update_connection_state,
            )
        )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
                self._async_handle_event,
            )
        )

    @callback
    def _async_handle_event(self, event: str) -> None:
        """Handle event."""
        self._trigger_event(event)
        self.async_write_ha_state()

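Note: the removed event platform above ends with a two-step pattern — record the event type, then write state. A minimal sketch of that pair outside Home Assistant (the class below is a hypothetical stand-in, not the EventEntity API):

class EventEntitySketch:
    def __init__(self, event_types: list[str]) -> None:
        self.event_types = event_types
        self.last_event: str | None = None

    def _trigger_event(self, event: str) -> None:
        # Like EventEntity, only declared event types are accepted.
        if event not in self.event_types:
            raise ValueError(f"unknown event type: {event}")
        self.last_event = event

    def async_write_ha_state(self) -> None:  # stand-in for the HA call
        print(f"state written, last event: {self.last_event}")

entity = EventEntitySketch(["short_press_release", "long_press_timeout"])
entity._trigger_event("short_press_release")
entity.async_write_ha_state()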
@@ -1,12 +1,7 @@
{
  "common": {
    "jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity.",
    "jid_options_name": "JID options",
    "long_press_release": "Release of long press",
    "long_press_timeout": "Long press",
    "short_press_release": "Release of short press",
    "very_long_press_release": "Release of very long press",
    "very_long_press_timeout": "Very long press"
    "jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
  },
  "config": {
    "error": {
@@ -34,150 +29,6 @@
      }
    }
  },
  "entity": {
    "event": {
      "bluetooth": {
        "name": "Bluetooth",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "microphone": {
        "name": "Microphone",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "next": {
        "name": "Next",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "playpause": {
        "name": "Play / Pause",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "preset1": {
        "name": "Favourite 1",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "preset2": {
        "name": "Favourite 2",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "preset3": {
        "name": "Favourite 3",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "preset4": {
        "name": "Favourite 4",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "previous": {
        "name": "Previous",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      },
      "volume": {
        "name": "Volume",
        "state_attributes": {
          "event_type": {
            "state": {
              "short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
              "long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
              "long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
              "very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
              "very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
            }
          }
        }
      }
    }
  },
  "selector": {
    "source_ids": {
      "options": {

@@ -3,10 +3,8 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING

from mozart_api.models import (
    ButtonEvent,
    ListeningModeProps,
    PlaybackContentMetadata,
    PlaybackError,
@@ -28,7 +26,6 @@ from homeassistant.util.enum import try_parse_enum
from .const import (
    BANG_OLUFSEN_WEBSOCKET_EVENT,
    CONNECTION_STATUS,
    EVENT_TRANSLATION_MAP,
    WebsocketNotification,
)
from .entity import BangOlufsenBase
@@ -57,8 +54,6 @@ class BangOlufsenWebsocket(BangOlufsenBase):
        self._client.get_active_listening_mode_notifications(
            self.on_active_listening_mode
        )
        self._client.get_button_notifications(self.on_button_notification)

        self._client.get_playback_error_notifications(
            self.on_playback_error_notification
        )
@@ -109,19 +104,6 @@ class BangOlufsenWebsocket(BangOlufsenBase):
            notification,
        )

    def on_button_notification(self, notification: ButtonEvent) -> None:
        """Send button dispatch."""
        # State is expected to always be available.
        if TYPE_CHECKING:
            assert notification.state

        # Send to event entity
        async_dispatcher_send(
            self.hass,
            f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
            EVENT_TRANSLATION_MAP[notification.state],
        )

    def on_notification_notification(
        self, notification: WebsocketNotificationTag
    ) -> None:

@@ -84,16 +84,16 @@
      }
    },
    "send_pin": {
      "name": "Send PIN",
      "description": "Sends a new PIN to Blink for 2FA.",
      "name": "Send pin",
      "description": "Sends a new PIN to blink for 2FA.",
      "fields": {
        "pin": {
          "name": "PIN",
          "description": "PIN received from Blink. Leave empty if you only received a verification email."
          "name": "Pin",
          "description": "PIN received from blink. Leave empty if you only received a verification email."
        },
        "config_entry_id": {
          "name": "Integration ID",
          "description": "The Blink Integration ID."
          "description": "The Blink Integration id."
        }
      }
    }

@@ -71,6 +71,27 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN):
            ),
        )

    async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
        """Import bluesound config entry from configuration.yaml."""
        session = async_get_clientsession(self.hass)
        async with Player(
            import_data[CONF_HOST], import_data[CONF_PORT], session=session
        ) as player:
            try:
                sync_status = await player.sync_status(timeout=1)
            except PlayerUnreachableError:
                return self.async_abort(reason="cannot_connect")

        await self.async_set_unique_id(
            format_unique_id(sync_status.mac, import_data[CONF_PORT])
        )
        self._abort_if_unique_id_configured()

        return self.async_create_entry(
            title=sync_status.name,
            data=import_data,
        )

    async def async_step_zeroconf(
        self, discovery_info: zeroconf.ZeroconfServiceInfo
    ) -> ConfigFlowResult:

@@ -15,6 +15,7 @@ import voluptuous as vol

from homeassistant.components import media_source
from homeassistant.components.media_player import (
    PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA,
    BrowseMedia,
    MediaPlayerEntity,
    MediaPlayerEntityFeature,

@@ -22,10 +23,16 @@ from homeassistant.components.media_player import (
    MediaType,
    async_process_play_media_url,
)
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers import (
    config_validation as cv,
    entity_platform,
    issue_registry as ir,
)
from homeassistant.helpers.device_registry import (
    CONNECTION_NETWORK_MAC,
    DeviceInfo,

@@ -36,9 +43,10 @@ from homeassistant.helpers.dispatcher import (
    async_dispatcher_send,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util

from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id

if TYPE_CHECKING:

@@ -63,6 +71,64 @@ SYNC_STATUS_INTERVAL = timedelta(minutes=5)

POLL_TIMEOUT = 120

PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_HOSTS): vol.All(
            cv.ensure_list,
            [
                {
                    vol.Required(CONF_HOST): cv.string,
                    vol.Optional(CONF_NAME): cv.string,
                    vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
                }
            ],
        )
    }
)


async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
    """Import config entry from configuration.yaml."""
    if not hass.config_entries.async_entries(DOMAIN):
        # Start import flow
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_IMPORT}, data=config
        )
        if (
            result["type"] == FlowResultType.ABORT
            and result["reason"] == "cannot_connect"
        ):
            ir.async_create_issue(
                hass,
                DOMAIN,
                f"deprecated_yaml_import_issue_{result['reason']}",
                breaks_in_ha_version="2025.2.0",
                is_fixable=False,
                issue_domain=DOMAIN,
                severity=ir.IssueSeverity.WARNING,
                translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
                translation_placeholders={
                    "domain": DOMAIN,
                    "integration_title": INTEGRATION_TITLE,
                },
            )
            return

    ir.async_create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_yaml_{DOMAIN}",
        breaks_in_ha_version="2025.2.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=ir.IssueSeverity.WARNING,
        translation_key="deprecated_yaml",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": INTEGRATION_TITLE,
        },
    )


async def async_setup_entry(
    hass: HomeAssistant,

@@ -93,6 +159,22 @@ async def async_setup_entry(
    async_add_entities([bluesound_player], update_before_add=True)


async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None,
) -> None:
    """Trigger import flows."""
    hosts = config.get(CONF_HOSTS, [])
    for host in hosts:
        import_data = {
            CONF_HOST: host[CONF_HOST],
            CONF_PORT: host.get(CONF_PORT, 11000),
        }
        hass.async_create_task(_async_import(hass, import_data))


class BluesoundPlayer(MediaPlayerEntity):
    """Representation of a Bluesound Player."""

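For orientation, the legacy YAML block this import path consumes would look roughly as follows; the host and port values are hypothetical, and the keys mirror the PLATFORM_SCHEMA above. A minimal sketch:

# Hypothetical legacy configuration.yaml entry (illustrative values only;
# the option names come from PLATFORM_SCHEMA above):
#
#   media_player:
#     - platform: bluesound
#       hosts:
#         - host: 192.168.1.50
#           port: 11000
#
# async_setup_platform() reduces each host entry to this dict and schedules
# _async_import(), which starts the config flow with source=SOURCE_IMPORT:
import_data = {"host": "192.168.1.50", "port": 11000}  # CONF_HOST / CONF_PORT

On success the flow's async_step_import() creates a config entry titled after the player's sync status; on cannot_connect it aborts and a repair issue is raised instead.
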
@@ -20,6 +20,6 @@
    "bluetooth-auto-recovery==1.4.2",
    "bluetooth-data-tools==1.20.0",
    "dbus-fast==2.24.3",
    "habluetooth==3.7.0"
    "habluetooth==3.6.0"
  ]
}

@@ -103,10 +103,9 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):

    VERSION = 1

    def __init__(self) -> None:
        """Initialize the config flow."""
        self.data: dict[str, Any] = {}
        self._existing_entry_data: dict[str, Any] = {}
    data: dict[str, Any] = {}

    _existing_entry_data: Mapping[str, Any] | None = None

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None

@@ -176,15 +175,19 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the change password step."""
        existing_data = (
            dict(self._existing_entry_data) if self._existing_entry_data else {}
        )

        if user_input is not None:
            return await self.async_step_user(self._existing_entry_data | user_input)
            return await self.async_step_user(existing_data | user_input)

        return self.async_show_form(
            step_id="change_password",
            data_schema=RECONFIGURE_SCHEMA,
            description_placeholders={
                CONF_USERNAME: self._existing_entry_data[CONF_USERNAME],
                CONF_REGION: self._existing_entry_data[CONF_REGION],
                CONF_USERNAME: existing_data[CONF_USERNAME],
                CONF_REGION: existing_data[CONF_REGION],
            },
        )

@@ -192,14 +195,14 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle configuration by re-auth."""
        self._existing_entry_data = dict(entry_data)
        self._existing_entry_data = entry_data
        return await self.async_step_change_password()

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a reconfiguration flow initialized by the user."""
        self._existing_entry_data = dict(self._get_reconfigure_entry().data)
        self._existing_entry_data = self._get_reconfigure_entry().data
        return await self.async_step_change_password()

    async def async_step_captcha(

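Both sides of this hunk merge the stored credentials with fresh user input; one side applies the PEP 584 dict union to an explicit dict copy. A standalone sketch of that operator (values are placeholders):

# Illustrative only: PEP 584 dict union, as used in the change-password step.
# The right-hand operand wins on key collisions, so the fresh password
# overrides the stored one without mutating the original mapping.
stored = {"username": "user@example.com", "password": "old", "region": "row"}
user_input = {"password": "new-secret"}

merged = stored | user_input
assert merged["password"] == "new-secret"
assert stored["password"] == "old"  # the stored mapping is left untouched
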
@@ -6,6 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/bring",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["bring_api"],
  "requirements": ["bring-api==0.9.1"]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/caldav",
  "iot_class": "cloud_polling",
  "loggers": ["caldav", "vobject"],
  "requirements": ["caldav==1.3.9", "icalendar==6.1.0"]
  "requirements": ["caldav==1.3.9"]
}

@@ -1,85 +0,0 @@
"""Support for media browsing."""

from aiostreammagic import StreamMagicClient
from aiostreammagic.models import Preset

from homeassistant.components.media_player import BrowseMedia, MediaClass
from homeassistant.core import HomeAssistant


async def async_browse_media(
    hass: HomeAssistant,
    client: StreamMagicClient,
    media_content_id: str | None,
    media_content_type: str | None,
) -> BrowseMedia:
    """Browse media."""

    if media_content_type == "presets":
        return await _presets_payload(client.preset_list.presets)

    return await _root_payload(
        hass,
        client,
    )


async def _root_payload(
    hass: HomeAssistant,
    client: StreamMagicClient,
) -> BrowseMedia:
    """Return root payload for Cambridge Audio."""
    children: list[BrowseMedia] = []

    if client.preset_list.presets:
        children.append(
            BrowseMedia(
                title="Presets",
                media_class=MediaClass.DIRECTORY,
                media_content_id="",
                media_content_type="presets",
                thumbnail="https://brands.home-assistant.io/_/cambridge_audio/logo.png",
                can_play=False,
                can_expand=True,
            )
        )

    return BrowseMedia(
        title="Cambridge Audio",
        media_class=MediaClass.DIRECTORY,
        media_content_id="",
        media_content_type="root",
        can_play=False,
        can_expand=True,
        children=children,
    )


async def _presets_payload(presets: list[Preset]) -> BrowseMedia:
    """Create payload to list presets."""

    children: list[BrowseMedia] = []
    for preset in presets:
        if preset.state != "OK":
            continue
        children.append(
            BrowseMedia(
                title=preset.name,
                media_class=MediaClass.MUSIC,
                media_content_id=str(preset.preset_id),
                media_content_type="preset",
                can_play=True,
                can_expand=False,
                thumbnail=preset.art_url,
            )
        )

    return BrowseMedia(
        title="Presets",
        media_class=MediaClass.DIRECTORY,
        media_content_id="",
        media_content_type="presets",
        can_play=False,
        can_expand=True,
        children=children,
    )

@@ -13,7 +13,6 @@ from aiostreammagic import (
)

from homeassistant.components.media_player import (
    BrowseMedia,
    MediaPlayerDeviceClass,
    MediaPlayerEntity,
    MediaPlayerEntityFeature,

@@ -25,7 +24,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import CambridgeAudioConfigEntry, media_browser
from . import CambridgeAudioConfigEntry
from .const import (
    CAMBRIDGE_MEDIA_TYPE_AIRABLE,
    CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO,

@@ -35,8 +34,7 @@ from .const import (
from .entity import CambridgeAudioEntity, command

BASE_FEATURES = (
    MediaPlayerEntityFeature.BROWSE_MEDIA
    | MediaPlayerEntityFeature.SELECT_SOURCE
    MediaPlayerEntityFeature.SELECT_SOURCE
    | MediaPlayerEntityFeature.TURN_OFF
    | MediaPlayerEntityFeature.TURN_ON
    | MediaPlayerEntityFeature.PLAY_MEDIA

@@ -340,13 +338,3 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity):

        if media_type == CAMBRIDGE_MEDIA_TYPE_INTERNET_RADIO:
            await self.client.play_radio_url("Radio", media_id)

    async def async_browse_media(
        self,
        media_content_type: MediaType | str | None = None,
        media_content_id: str | None = None,
    ) -> BrowseMedia:
        """Implement the media browsing helper."""
        return await media_browser.async_browse_media(
            self.hass, self.client, media_content_id, media_content_type
        )

@@ -12,7 +12,7 @@
      }
    },
    "discovery_confirm": {
      "description": "Do you want to set up {name}?"
      "description": "Do you want to setup {name}?"
    },
    "reconfigure": {
      "description": "Reconfigure your Cambridge Audio Streamer.",

@@ -28,7 +28,7 @@
      "cannot_connect": "Failed to connect to Cambridge Audio device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect."
    },
    "abort": {
      "wrong_device": "This Cambridge Audio device does not match the existing device ID. Please make sure you entered the correct IP address.",
      "wrong_device": "This Cambridge Audio device does not match the existing device id. Please make sure you entered the correct IP address.",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"

@@ -516,19 +516,6 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
        """Flag supported features."""
        return self._attr_supported_features

    @property
    def supported_features_compat(self) -> CameraEntityFeature:
        """Return the supported features as CameraEntityFeature.

        Remove this compatibility shim in 2025.1 or later.
        """
        features = self.supported_features
        if type(features) is int:  # noqa: E721
            new_features = CameraEntityFeature(features)
            self._report_deprecated_supported_features_values(new_features)
            return new_features
        return features

    @cached_property
    def is_recording(self) -> bool:
        """Return true if the device is recording."""

@@ -582,7 +569,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):

            self._deprecate_attr_frontend_stream_type_logged = True
            return self._attr_frontend_stream_type
        if CameraEntityFeature.STREAM not in self.supported_features_compat:
        if CameraEntityFeature.STREAM not in self.supported_features:
            return None
        if (
            self._webrtc_provider

@@ -811,9 +798,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
    async def async_internal_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""
        await super().async_internal_added_to_hass()
        self.__supports_stream = (
            self.supported_features_compat & CameraEntityFeature.STREAM
        )
        self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM
        await self.async_refresh_providers(write_state=False)

    async def async_refresh_providers(self, *, write_state: bool = True) -> None:

@@ -853,7 +838,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
        self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
    ) -> _T | None:
        """Get first provider that supports this camera."""
        if CameraEntityFeature.STREAM not in self.supported_features_compat:
        if CameraEntityFeature.STREAM not in self.supported_features:
            return None

        return await fn(self.hass, self)

@@ -911,7 +896,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
    def camera_capabilities(self) -> CameraCapabilities:
        """Return the camera capabilities."""
        frontend_stream_types = set()
        if CameraEntityFeature.STREAM in self.supported_features_compat:
        if CameraEntityFeature.STREAM in self.supported_features:
            if self._supports_native_sync_webrtc or self._supports_native_async_webrtc:
                # The camera has a native WebRTC implementation
                frontend_stream_types.add(StreamType.WEB_RTC)

@@ -931,8 +916,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
        """
        super().async_write_ha_state()
        if self.__supports_stream != (
            supports_stream := self.supported_features_compat
            & CameraEntityFeature.STREAM
            supports_stream := self.supported_features & CameraEntityFeature.STREAM
        ):
            self.__supports_stream = supports_stream
            self._invalidate_camera_capabilities_cache()

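The removed supported_features_compat shim existed to coerce bare int values into the feature enum; a standalone sketch of the IntFlag behavior the remaining code relies on (a trimmed local stand-in, not the real Home Assistant class):

# Standalone sketch: why the enum-based membership test works once all
# integrations return the IntFlag type instead of a bare int.
from enum import IntFlag

class CameraFeature(IntFlag):  # hypothetical stand-in for CameraEntityFeature
    ON_OFF = 1
    STREAM = 2

features = CameraFeature.ON_OFF | CameraFeature.STREAM
assert CameraFeature.STREAM in features       # enum membership test
assert features & CameraFeature.STREAM        # bitwise form, as in the hunks above
assert type(features) is not int              # the condition the old shim detected
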
@@ -5,7 +5,7 @@ from __future__ import annotations
import configparser
from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING, ClassVar
from typing import TYPE_CHECKING
from urllib.parse import urlparse

import aiohttp

@@ -129,7 +129,7 @@ class ChromecastInfo:
class ChromeCastZeroconf:
    """Class to hold a zeroconf instance."""

    __zconf: ClassVar[zeroconf.HaZeroconf | None] = None
    __zconf: zeroconf.HaZeroconf | None = None

    @classmethod
    def set_zeroconf(cls, zconf: zeroconf.HaZeroconf) -> None:

@@ -3,6 +3,7 @@
from __future__ import annotations

import logging
import re

from pexpect import pxssh
import voluptuous as vol

@@ -100,11 +101,11 @@ class CiscoDeviceScanner(DeviceScanner):

        return False

    def _get_arp_data(self) -> str | None:
    def _get_arp_data(self):
        """Open connection to the router and get arp entries."""

        try:
            cisco_ssh: pxssh.pxssh[str] = pxssh.pxssh(encoding="uft-8")
            cisco_ssh = pxssh.pxssh()
            cisco_ssh.login(
                self.host,
                self.username,

@@ -114,11 +115,12 @@ class CiscoDeviceScanner(DeviceScanner):
            )

            # Find the hostname
            initial_line = (cisco_ssh.before or "").splitlines()
            initial_line = cisco_ssh.before.decode("utf-8").splitlines()
            router_hostname = initial_line[len(initial_line) - 1]
            router_hostname += "#"
            # Set the discovered hostname as prompt
            cisco_ssh.PROMPT = f"(?i)^{router_hostname}"
            regex_expression = f"(?i)^{router_hostname}".encode()
            cisco_ssh.PROMPT = re.compile(regex_expression, re.MULTILINE)
            # Allow full arp table to print at once
            cisco_ssh.sendline("terminal length 0")
            cisco_ssh.prompt(1)

@@ -126,11 +128,13 @@ class CiscoDeviceScanner(DeviceScanner):
            cisco_ssh.sendline("show ip arp")
            cisco_ssh.prompt(1)

            devices_result = cisco_ssh.before

            return devices_result.decode("utf-8")
        except pxssh.ExceptionPxssh as px_e:
            _LOGGER.error("Failed to login via pxssh: %s", px_e)
            return None

        return cisco_ssh.before
        return None


def _parse_cisco_mac_address(cisco_hardware_addr):

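The two sides of this hunk differ in whether pexpect runs in text or bytes mode: constructed with an encoding, .before yields str; without one, it yields bytes that need an explicit decode. Note also that the typed branch above spells its codec "uft-8", which would presumably fail codec lookup at runtime. A minimal sketch (no connection is made until login() is called; credentials would be placeholders):

# Minimal sketch of pexpect's text vs bytes mode in pxssh.
from pexpect import pxssh

text_session = pxssh.pxssh(encoding="utf-8")  # .before will be str
bytes_session = pxssh.pxssh()                 # .before will be bytes
# In bytes mode the caller must decode the captured output, which is what
# the older branch above does with cisco_ssh.before.decode("utf-8").
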
@@ -6,5 +6,5 @@
  "iot_class": "local_polling",
  "loggers": ["pexpect", "ptyprocess"],
  "quality_scale": "legacy",
  "requirements": ["pexpect==4.9.0"]
  "requirements": ["pexpect==4.6.0"]
}

@@ -36,14 +36,7 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.util.signal_type import SignalType

# Pre-import backup to avoid it being imported
# later when the import executor is busy and delaying
# startup
from . import (
    account_link,
    backup,  # noqa: F401
    http_api,
)
from . import account_link, http_api
from .client import CloudClient
from .const import (
    CONF_ACCOUNT_LINK_SERVER,

@@ -3,12 +3,11 @@
from __future__ import annotations

import base64
from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
from collections.abc import AsyncIterator, Callable, Coroutine
import hashlib
import logging
from typing import Any
from typing import Any, Self

from aiohttp import ClientError, ClientTimeout
from aiohttp import ClientError, ClientTimeout, StreamReader
from hass_nabucasa import Cloud, CloudError
from hass_nabucasa.cloud_api import (
    async_files_delete_file,

@@ -19,13 +18,10 @@ from hass_nabucasa.cloud_api import (

from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator
from homeassistant.helpers.dispatcher import async_dispatcher_connect

from .client import CloudClient
from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT
from .const import DATA_CLOUD, DOMAIN

_LOGGER = logging.getLogger(__name__)
_STORAGE_BACKUP = "backup"


@@ -49,29 +45,29 @@ async def async_get_backup_agents(
    return [CloudBackupAgent(hass=hass, cloud=cloud)]


@callback
def async_register_backup_agents_listener(
    hass: HomeAssistant,
    *,
    listener: Callable[[], None],
    **kwargs: Any,
) -> Callable[[], None]:
    """Register a listener to be called when agents are added or removed."""
class ChunkAsyncStreamIterator:
    """Async iterator for chunked streams.

    @callback
    def unsub() -> None:
        """Unsubscribe from events."""
        unsub_signal()
    Based on aiohttp.streams.ChunkTupleAsyncStreamIterator, but yields
    bytes instead of tuple[bytes, bool].
    """

    @callback
    def handle_event(data: Mapping[str, Any]) -> None:
        """Handle event."""
        if data["type"] not in ("login", "logout"):
            return
        listener()
    __slots__ = ("_stream",)

    unsub_signal = async_dispatcher_connect(hass, EVENT_CLOUD_EVENT, handle_event)
    return unsub
    def __init__(self, stream: StreamReader) -> None:
        """Initialize."""
        self._stream = stream

    def __aiter__(self) -> Self:
        """Iterate."""
        return self

    async def __anext__(self) -> bytes:
        """Yield next chunk."""
        rv = await self._stream.readchunk()
        if rv == (b"", False):
            raise StopAsyncIteration
        return rv[0]


class CloudBackupAgent(BackupAgent):

@@ -157,11 +153,6 @@ class CloudBackupAgent(BackupAgent):
            headers=details["headers"] | {"content-length": str(backup.size)},
            timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
        )
        _LOGGER.log(
            logging.DEBUG if upload_status.status < 400 else logging.WARNING,
            "Backup upload status: %s",
            upload_status.status,
        )
        upload_status.raise_for_status()
    except (TimeoutError, ClientError) as err:
        raise BackupAgentError("Failed to upload backup") from err

@@ -191,7 +182,6 @@ class CloudBackupAgent(BackupAgent):
        """List backups."""
        try:
            backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
            _LOGGER.debug("Cloud backups: %s", backups)
        except (ClientError, CloudError) as err:
            raise BackupAgentError("Failed to list backups") from err

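For context, a minimal consumption sketch of the iterator defined above; the URL is a placeholder and error handling is omitted. aiohttp's StreamReader.readchunk() yields (b"", False) at end of stream, which the iterator translates into StopAsyncIteration:

# Minimal consumption sketch (illustrative URL, no error handling).
import aiohttp

async def stream_download(url: str) -> int:
    total = 0
    async with aiohttp.ClientSession() as session, session.get(url) as resp:
        async for chunk in ChunkAsyncStreamIterator(resp.content):
            total += len(chunk)  # process each bytes chunk as it arrives
    return total
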
@@ -306,7 +306,6 @@ class CloudClient(Interface):
            },
            "version": HA_VERSION,
            "instance_id": self.prefs.instance_id,
            "name": self._hass.config.location_name,
        }

    async def async_alexa_message(self, payload: dict[Any, Any]) -> dict[Any, Any]:

@@ -18,8 +18,6 @@ DATA_CLOUD: HassKey[Cloud[CloudClient]] = HassKey(DOMAIN)
DATA_PLATFORMS_SETUP: HassKey[dict[str, asyncio.Event]] = HassKey(
    "cloud_platforms_setup"
)
EVENT_CLOUD_EVENT = "cloud_event"

REQUEST_TIMEOUT = 10

PREF_ENABLE_ALEXA = "alexa_enabled"

@@ -34,7 +34,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util.location import async_detect_location_info

from .alexa_config import entity_supported as entity_supported_by_alexa

@@ -42,7 +41,6 @@ from .assist_pipeline import async_create_cloud_pipeline
from .client import CloudClient
from .const import (
    DATA_CLOUD,
    EVENT_CLOUD_EVENT,
    LOGIN_MFA_TIMEOUT,
    PREF_ALEXA_REPORT_STATE,
    PREF_DISABLE_2FA,

@@ -280,8 +278,6 @@ class CloudLoginView(HomeAssistantView):
            new_cloud_pipeline_id = await async_create_cloud_pipeline(hass)
        else:
            new_cloud_pipeline_id = None

        async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "login"})
        return self.json({"success": True, "cloud_pipeline": new_cloud_pipeline_id})


@@ -301,7 +297,6 @@ class CloudLogoutView(HomeAssistantView):
        async with asyncio.timeout(REQUEST_TIMEOUT):
            await cloud.logout()

        async_dispatcher_send(hass, EVENT_CLOUD_EVENT, {"type": "logout"})
        return self.json_message("ok")

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aiocomelit"],
  "requirements": ["aiocomelit==0.10.1"]
  "requirements": ["aiocomelit==0.9.1"]
}

@@ -5,5 +5,5 @@
  "documentation": "https://www.home-assistant.io/integrations/compensation",
  "iot_class": "calculated",
  "quality_scale": "legacy",
  "requirements": ["numpy==2.2.1"]
  "requirements": ["numpy==2.2.0"]
}

@@ -326,9 +326,6 @@ class SubentryManagerFlowIndexView(
        """Return context."""
        context = super().get_context(data)
        context["source"] = config_entries.SOURCE_USER
        if subentry_id := data.get("subentry_id"):
            context["source"] = config_entries.SOURCE_RECONFIGURE
            context["subentry_id"] = subentry_id
        return context


@@ -681,11 +678,10 @@ async def config_subentry_list(
    result = [
        {
            "subentry_id": subentry.subentry_id,
            "subentry_type": subentry.subentry_type,
            "title": subentry.title,
            "unique_id": subentry.unique_id,
        }
        for subentry in entry.subentries.values()
        for subentry_id, subentry in entry.subentries.items()
    ]
    connection.send_result(msg["id"], result)

@@ -75,7 +75,6 @@ async def async_converse(
    language: str | None = None,
    agent_id: str | None = None,
    device_id: str | None = None,
    extra_system_prompt: str | None = None,
) -> ConversationResult:
    """Process text and get intent."""
    agent = async_get_agent(hass, agent_id)

@@ -100,7 +99,6 @@ async def async_converse(
        device_id=device_id,
        language=language,
        agent_id=agent_id,
        extra_system_prompt=extra_system_prompt,
    )
    with async_conversation_trace() as trace:
        trace.add_event(

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["hassil==2.1.0", "home-assistant-intents==2025.1.1"]
  "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.9"]
}

@@ -40,9 +40,6 @@ class ConversationInput:
    agent_id: str | None = None
    """Agent to use for processing."""

    extra_system_prompt: str | None = None
    """Extra prompt to provide extra info to LLMs how to understand the command."""

    def as_dict(self) -> dict[str, Any]:
        """Return input as a dict."""
        return {

@@ -52,7 +49,6 @@ class ConversationInput:
            "device_id": self.device_id,
            "language": self.language,
            "agent_id": self.agent_id,
            "extra_system_prompt": self.extra_system_prompt,
        }

@@ -2,7 +2,7 @@

from __future__ import annotations

from cookidoo_api import Cookidoo, CookidooConfig, get_localization_options
from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig

from homeassistant.const import (
    CONF_COUNTRY,

@@ -16,23 +16,21 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.TODO]
PLATFORMS: list[Platform] = [Platform.TODO]


async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
    """Set up Cookidoo from a config entry."""

    localizations = await get_localization_options(
        country=entry.data[CONF_COUNTRY].lower(),
        language=entry.data[CONF_LANGUAGE],
    )

    cookidoo = Cookidoo(
        async_get_clientsession(hass),
        CookidooConfig(
            email=entry.data[CONF_EMAIL],
            password=entry.data[CONF_PASSWORD],
            localization=localizations[0],
            localization=CookidooLocalizationConfig(
                country_code=entry.data[CONF_COUNTRY].lower(),
                language=entry.data[CONF_LANGUAGE],
            ),
        ),
    )

@@ -1,70 +0,0 @@
"""Support for Cookidoo buttons."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass

from cookidoo_api import Cookidoo, CookidooException

from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
from .entity import CookidooBaseEntity

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class CookidooButtonEntityDescription(ButtonEntityDescription):
    """Describes cookidoo button entity."""

    press_fn: Callable[[Cookidoo], Awaitable[None]]


TODO_CLEAR = CookidooButtonEntityDescription(
    key="todo_clear",
    translation_key="todo_clear",
    press_fn=lambda client: client.clear_shopping_list(),
    entity_registry_enabled_default=False,
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: CookidooConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Cookidoo button entities based on a config entry."""
    coordinator = entry.runtime_data

    async_add_entities([CookidooButton(coordinator, TODO_CLEAR)])


class CookidooButton(CookidooBaseEntity, ButtonEntity):
    """Defines an Cookidoo button."""

    entity_description: CookidooButtonEntityDescription

    def __init__(
        self,
        coordinator: CookidooDataUpdateCoordinator,
        description: CookidooButtonEntityDescription,
    ) -> None:
        """Initialize cookidoo button."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{description.key}"

    async def async_press(self) -> None:
        """Press the button."""
        try:
            await self.entity_description.press_fn(self.coordinator.cookidoo)
        except CookidooException as e:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="button_clear_todo_failed",
            ) from e
        await self.coordinator.async_refresh()

@@ -10,6 +10,7 @@ from cookidoo_api import (
    Cookidoo,
    CookidooAuthException,
    CookidooConfig,
    CookidooLocalizationConfig,
    CookidooRequestException,
    get_country_options,
    get_localization_options,

@@ -218,19 +219,18 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
        else:
            data_input[CONF_LANGUAGE] = (
                await get_localization_options(country=data_input[CONF_COUNTRY].lower())
            )[0].language  # Pick any language to test login

            localizations = await get_localization_options(
                country=data_input[CONF_COUNTRY].lower(),
                language=data_input[CONF_LANGUAGE],
            )
            )[0]  # Pick any language to test login

        session = async_get_clientsession(self.hass)
        cookidoo = Cookidoo(
            async_get_clientsession(self.hass),
            session,
            CookidooConfig(
                email=data_input[CONF_EMAIL],
                password=data_input[CONF_PASSWORD],
                localization=localizations[0],
                localization=CookidooLocalizationConfig(
                    country_code=data_input[CONF_COUNTRY].lower(),
                    language=data_input[CONF_LANGUAGE],
                ),
            ),
        )
        try:

@@ -1,10 +1,5 @@
{
  "entity": {
    "button": {
      "todo_clear": {
        "default": "mdi:cart-off"
      }
    },
    "todo": {
      "ingredient_list": {
        "default": "mdi:cart-plus"

@@ -6,7 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/cookidoo",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["cookidoo_api"],
  "quality_scale": "silver",
  "requirements": ["cookidoo-api==0.11.2"]
  "requirements": ["cookidoo-api==0.10.0"]
}

@@ -48,11 +48,6 @@
    }
  },
  "entity": {
    "button": {
      "todo_clear": {
        "name": "Clear shopping list and additional purchases"
      }
    },
    "todo": {
      "ingredient_list": {
        "name": "Shopping list"

@@ -63,9 +58,6 @@
    }
  },
  "exceptions": {
    "button_clear_todo_failed": {
      "message": "Failed to clear all items from the Cookidoo shopping list"
    },
    "todo_save_item_failed": {
      "message": "Failed to save {name} to Cookidoo shopping list"
    },

@@ -300,10 +300,6 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
    def supported_features(self) -> CoverEntityFeature:
        """Flag supported features."""
        if (features := self._attr_supported_features) is not None:
            if type(features) is int:  # noqa: E721
                new_features = CoverEntityFeature(features)
                self._report_deprecated_supported_features_values(new_features)
                return new_features
            return features

        supported_features = (

@@ -2,7 +2,6 @@

from __future__ import annotations

from collections.abc import Sequence
import os

from serial.tools.list_ports_common import ListPortInfo

@@ -13,7 +12,7 @@ from .const import DONT_USE_USB, MANUAL_PATH, REFRESH_LIST


def list_ports_as_str(
    serial_ports: Sequence[ListPortInfo], no_usb_option: bool = True
    serial_ports: list[ListPortInfo], no_usb_option: bool = True
) -> list[str]:
    """Represent currently available serial ports as string.

@@ -266,7 +266,7 @@ class DeconzBaseLight[_LightDeviceT: Group | Light](
    @property
    def color_temp_kelvin(self) -> int | None:
        """Return the CT color value."""
        if self._device.color_temp is None or self._device.color_temp == 0:
        if self._device.color_temp is None:
            return None
        return color_temperature_mired_to_kelvin(self._device.color_temp)

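The added == 0 guard matters because the mired-to-kelvin conversion is a reciprocal, so a mired value of 0 would divide by zero. A standalone sketch of the arithmetic (simplified; the real helper's exact rounding is an implementation detail):

# Standalone sketch: kelvin and mired are reciprocals scaled by 1,000,000.
def mired_to_kelvin(mired: float) -> int:
    return int(1_000_000 / mired)  # simplified stand-in for the HA helper

print(mired_to_kelvin(153))  # ~6535 K, a typical cool white
# mired_to_kelvin(0) -> ZeroDivisionError, hence the color_temp == 0 check
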
@@ -1 +0,0 @@
"""Virtual integration: Decorquip."""

@@ -1,6 +0,0 @@
{
  "domain": "decorquip",
  "name": "Decorquip Dream",
  "integration_type": "virtual",
  "supported_by": "motion_blinds"
}

@@ -50,7 +50,7 @@
  "services": {
    "get_command": {
      "name": "Get command",
      "description": "Sends a generic HTTP get command.",
      "description": "Send sa generic HTTP get command.",
      "fields": {
        "command": {
          "name": "Command",

@@ -81,8 +81,14 @@ class DevoloBinaryDeviceEntity(DevoloDeviceEntity, BinarySensorEntity):
            or self._binary_sensor_property.sensor_type
        )

        if device_instance.binary_sensor_property[element_uid].sub_type == "overload":
            self._attr_translation_key = "overload"
        if device_instance.binary_sensor_property[element_uid].sub_type != "":
            self._attr_name = device_instance.binary_sensor_property[
                element_uid
            ].sub_type.capitalize()
        else:
            self._attr_name = device_instance.binary_sensor_property[
                element_uid
            ].sensor_type.capitalize()

        self._value = self._binary_sensor_property.state

@@ -123,8 +129,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):

        self._key = key
        self._attr_is_on = False
        self._attr_translation_key = "button"
        self._attr_translation_placeholders = {"key": str(key)}
        self._attr_name = f"Button {key}"

    def _sync(self, message: tuple) -> None:
        """Update the binary sensor state."""

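The translation-key side of this hunk pairs with the strings.json entry shown further below ("Button {key}"); a minimal sketch of how the placeholder substitutes (illustrative only: the real substitution happens in Home Assistant's translation layer, not via str.format):

# Illustrative only: how a translation placeholder pairs with strings.json.
#   strings.json side (see the devolo hunk below):
#     "button": { "name": "Button {key}" }
translation_template = "Button {key}"               # from strings.json
placeholders = {"key": str(3)}                      # _attr_translation_placeholders
print(translation_template.format(**placeholders))  # -> "Button 3"
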
@@ -116,11 +116,9 @@ class DevoloGenericMultiLevelDeviceEntity(DevoloMultiLevelDeviceEntity):
            self._multi_level_sensor_property.sensor_type
        )
        self._attr_native_unit_of_measurement = self._multi_level_sensor_property.unit
        self._attr_name = self._multi_level_sensor_property.sensor_type.capitalize()
        self._value = self._multi_level_sensor_property.value

        if self._multi_level_sensor_property.sensor_type == "light":
            self._attr_translation_key = "brightness"

        if element_uid.startswith("devolo.VoltageMultiLevelSensor:"):
            self._attr_entity_registry_enabled_default = False

@@ -130,6 +128,7 @@ class DevoloBatteryEntity(DevoloMultiLevelDeviceEntity):

    _attr_entity_category = EntityCategory.DIAGNOSTIC
    _attr_native_unit_of_measurement = PERCENTAGE
    _attr_name = "Battery level"
    _attr_device_class = SensorDeviceClass.BATTERY
    _attr_state_class = SensorStateClass.MEASUREMENT

@@ -176,6 +175,8 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity):
            device_instance.consumption_property[element_uid], consumption
        )

        self._attr_name = f"{consumption.capitalize()} consumption"

    @property
    def unique_id(self) -> str:
        """Return the unique ID of the entity.

@@ -30,20 +30,5 @@
        }
      }
    }
  },
  "entity": {
    "binary_sensor": {
      "button": {
        "name": "Button {key}"
      },
      "overload": {
        "name": "Overload"
      }
    },
    "sensor": {
      "brightness": {
        "name": "Brightness"
      }
    }
  }
}

@@ -8,7 +8,7 @@
  "documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
  "iot_class": "local_push",
  "loggers": ["async_upnp_client"],
  "requirements": ["async-upnp-client==0.42.0", "getmac==0.9.4"],
  "requirements": ["async-upnp-client==0.41.0", "getmac==0.9.4"],
  "ssdp": [
    {
      "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",

@@ -7,7 +7,7 @@
  "dependencies": ["ssdp"],
  "documentation": "https://www.home-assistant.io/integrations/dlna_dms",
  "iot_class": "local_polling",
  "requirements": ["async-upnp-client==0.42.0"],
  "requirements": ["async-upnp-client==0.41.0"],
  "ssdp": [
    {
      "deviceType": "urn:schemas-upnp-org:device:MediaServer:1",

@@ -6,5 +6,5 @@
  "iot_class": "local_polling",
  "loggers": ["pydoods"],
  "quality_scale": "legacy",
  "requirements": ["pydoods==1.0.2", "Pillow==11.1.0"]
  "requirements": ["pydoods==1.0.2", "Pillow==11.0.0"]
}

Some files were not shown because too many files have changed in this diff