Mirror of https://github.com/home-assistant/core.git (synced 2026-05-05 12:24:48 +02:00)

Merge branch 'dev' into flussButtonApi
@@ -1,5 +1,6 @@
name: Report an issue with Home Assistant Core
description: Report an issue with Home Assistant Core.
+type: Bug
body:
- type: markdown
attributes:
@@ -32,7 +32,7 @@ jobs:
fetch-depth: 0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -116,7 +116,7 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

- name: Download translations
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: translations

@@ -324,7 +324,7 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Install Cosign
-uses: sigstore/cosign-installer@v3.8.1
+uses: sigstore/cosign-installer@v3.8.2
with:
cosign-release: "v2.2.3"

@@ -457,12 +457,12 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Download translations
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: translations

@@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build Docker image
-uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
-uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
+uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 12
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
-HA_SHORT_VERSION: "2025.4"
+HA_SHORT_VERSION: "2025.5"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -249,7 +249,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -294,7 +294,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -334,7 +334,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -374,7 +374,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -484,7 +484,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -587,7 +587,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -620,7 +620,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -653,7 +653,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Dependency review
-uses: actions/dependency-review-action@v4.5.0
+uses: actions/dependency-review-action@v4.6.0
with:
license-check: false # We use our own license audit checks

@@ -677,7 +677,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -720,7 +720,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -767,7 +767,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -812,7 +812,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -889,7 +889,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -949,7 +949,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -968,7 +968,7 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: pytest_buckets
- name: Compile English translations
@@ -1074,7 +1074,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -1208,7 +1208,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -1312,12 +1312,12 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
-uses: codecov/codecov-action@v5.4.0
+uses: codecov/codecov-action@v5.4.2
with:
fail_ci_if_error: true
flags: full-suite
@@ -1359,7 +1359,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -1454,12 +1454,12 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
-uses: codecov/codecov-action@v5.4.0
+uses: codecov/codecov-action@v5.4.2
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
@@ -1479,7 +1479,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Download all coverage artifacts
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
pattern: test-results-*
- name: Upload test results to Codecov
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Initialize CodeQL
-uses: github/codeql-action/init@v3.28.12
+uses: github/codeql-action/init@v3.28.16
with:
languages: python

- name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v3.28.12
+uses: github/codeql-action/analyze@v3.28.16
with:
category: "/language:python"
@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -36,7 +36,7 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
-uses: actions/setup-python@v5.4.0
+uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -138,17 +138,17 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Download env_file
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: env_file

- name: Download build_constraints
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: build_constraints

- name: Download requirements_diff
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: requirements_diff

@@ -159,7 +159,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
-uses: home-assistant/wheels@2025.02.0
+uses: home-assistant/wheels@2025.03.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -187,22 +187,22 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Download env_file
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: env_file

- name: Download build_constraints
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: build_constraints

- name: Download requirements_diff
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: requirements_diff

- name: Download requirements_all_wheels
-uses: actions/download-artifact@v4.2.1
+uses: actions/download-artifact@v4.3.0
with:
name: requirements_all_wheels

@@ -219,7 +219,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
-uses: home-assistant/wheels@2025.02.0
+uses: home-assistant/wheels@2025.03.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -119,6 +119,7 @@ homeassistant.components.bluetooth_adapters.*
homeassistant.components.bluetooth_tracker.*
homeassistant.components.bmw_connected_drive.*
homeassistant.components.bond.*
homeassistant.components.bosch_alarm.*
homeassistant.components.braviatv.*
homeassistant.components.bring.*
homeassistant.components.brother.*
@@ -290,6 +291,7 @@ homeassistant.components.kaleidescape.*
homeassistant.components.knocki.*
homeassistant.components.knx.*
homeassistant.components.kraken.*
homeassistant.components.kulersky.*
homeassistant.components.lacrosse.*
homeassistant.components.lacrosse_view.*
homeassistant.components.lamarzocco.*
@@ -361,8 +363,10 @@ homeassistant.components.no_ip.*
homeassistant.components.nordpool.*
homeassistant.components.notify.*
homeassistant.components.notion.*
homeassistant.components.ntfy.*
homeassistant.components.number.*
homeassistant.components.nut.*
homeassistant.components.ohme.*
homeassistant.components.onboarding.*
homeassistant.components.oncue.*
homeassistant.components.onedrive.*
@@ -382,6 +386,7 @@ homeassistant.components.pandora.*
homeassistant.components.panel_custom.*
homeassistant.components.peblar.*
homeassistant.components.peco.*
homeassistant.components.pegel_online.*
homeassistant.components.persistent_notification.*
homeassistant.components.person.*
homeassistant.components.pi_hole.*
@@ -458,6 +463,7 @@ homeassistant.components.slack.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*
homeassistant.components.smtp.*
homeassistant.components.snooz.*
homeassistant.components.solarlog.*
homeassistant.components.sonarr.*
Generated
+17
-7
@@ -216,6 +216,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/bmw_connected_drive/ @gerard33 @rikroe
|
||||
/homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
|
||||
/tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto
|
||||
/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900
|
||||
/tests/components/bosch_alarm/ @mag1024 @sanjay900
|
||||
/homeassistant/components/bosch_shc/ @tschamm
|
||||
/tests/components/bosch_shc/ @tschamm
|
||||
/homeassistant/components/braviatv/ @bieniu @Drafteed
|
||||
@@ -430,7 +432,7 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/entur_public_transport/ @hfurubotten
|
||||
/homeassistant/components/environment_canada/ @gwww @michaeldavie
|
||||
/tests/components/environment_canada/ @gwww @michaeldavie
|
||||
/homeassistant/components/ephember/ @ttroy50
|
||||
/homeassistant/components/ephember/ @ttroy50 @roberty99
|
||||
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
|
||||
/tests/components/epic_games_store/ @hacf-fr @Quentame
|
||||
/homeassistant/components/epion/ @lhgravendeel
|
||||
@@ -704,6 +706,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/image_upload/ @home-assistant/core
|
||||
/homeassistant/components/imap/ @jbouwh
|
||||
/tests/components/imap/ @jbouwh
|
||||
/homeassistant/components/imeon_inverter/ @Imeon-Energy
|
||||
/tests/components/imeon_inverter/ @Imeon-Energy
|
||||
/homeassistant/components/imgw_pib/ @bieniu
|
||||
/tests/components/imgw_pib/ @bieniu
|
||||
/homeassistant/components/improv_ble/ @emontnemery
|
||||
@@ -935,6 +939,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/metoffice/ @MrHarcombe @avee87
|
||||
/homeassistant/components/microbees/ @microBeesTech
|
||||
/tests/components/microbees/ @microBeesTech
|
||||
/homeassistant/components/miele/ @astrandb
|
||||
/tests/components/miele/ @astrandb
|
||||
/homeassistant/components/mikrotik/ @engrbm87
|
||||
/tests/components/mikrotik/ @engrbm87
|
||||
/homeassistant/components/mill/ @danielhiversen
|
||||
@@ -1047,6 +1053,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/nsw_fuel_station/ @nickw444
|
||||
/homeassistant/components/nsw_rural_fire_service_feed/ @exxamalte
|
||||
/tests/components/nsw_rural_fire_service_feed/ @exxamalte
|
||||
/homeassistant/components/ntfy/ @tr4nt0r
|
||||
/tests/components/ntfy/ @tr4nt0r
|
||||
/homeassistant/components/nuheat/ @tstabrawa
|
||||
/tests/components/nuheat/ @tstabrawa
|
||||
/homeassistant/components/nuki/ @pschmitt @pvizeli @pree
|
||||
@@ -1185,6 +1193,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/prusalink/ @balloob
|
||||
/homeassistant/components/ps4/ @ktnrg45
|
||||
/tests/components/ps4/ @ktnrg45
|
||||
/homeassistant/components/pterodactyl/ @elmurato
|
||||
/tests/components/pterodactyl/ @elmurato
|
||||
/homeassistant/components/pure_energie/ @klaasnicolaas
|
||||
/tests/components/pure_energie/ @klaasnicolaas
|
||||
/homeassistant/components/purpleair/ @bachya
|
||||
@@ -1310,6 +1320,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/ruuvitag_ble/ @akx
|
||||
/homeassistant/components/rympro/ @OnFreund @elad-bar @maorcc
|
||||
/tests/components/rympro/ @OnFreund @elad-bar @maorcc
|
||||
/homeassistant/components/s3/ @tomasbedrich
|
||||
/tests/components/s3/ @tomasbedrich
|
||||
/homeassistant/components/sabnzbd/ @shaiu @jpbede
|
||||
/tests/components/sabnzbd/ @shaiu @jpbede
|
||||
/homeassistant/components/saj/ @fredericvl
|
||||
@@ -1385,7 +1397,6 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/siren/ @home-assistant/core @raman325
|
||||
/tests/components/siren/ @home-assistant/core @raman325
|
||||
/homeassistant/components/sisyphus/ @jkeljo
|
||||
/homeassistant/components/sky_hub/ @rogerselwyn
|
||||
/homeassistant/components/sky_remote/ @dunnmj @saty9
|
||||
/tests/components/sky_remote/ @dunnmj @saty9
|
||||
/homeassistant/components/skybell/ @tkdrob
|
||||
@@ -1432,8 +1443,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/solarlog/ @Ernst79 @dontinelli
|
||||
/homeassistant/components/solax/ @squishykid @Darsstar
|
||||
/tests/components/solax/ @squishykid @Darsstar
|
||||
/homeassistant/components/soma/ @ratsept @sebfortier2288
|
||||
/tests/components/soma/ @ratsept @sebfortier2288
|
||||
/homeassistant/components/soma/ @ratsept
|
||||
/tests/components/soma/ @ratsept
|
||||
/homeassistant/components/sonarr/ @ctalkington
|
||||
/tests/components/sonarr/ @ctalkington
|
||||
/homeassistant/components/songpal/ @rytilahti @shenxn
|
||||
@@ -1465,7 +1476,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/steam_online/ @tkdrob
|
||||
/homeassistant/components/steamist/ @bdraco
|
||||
/tests/components/steamist/ @bdraco
|
||||
/homeassistant/components/stiebel_eltron/ @fucm
|
||||
/homeassistant/components/stiebel_eltron/ @fucm @ThyMYthOS
|
||||
/tests/components/stiebel_eltron/ @fucm @ThyMYthOS
|
||||
/homeassistant/components/stookwijzer/ @fwestenberg
|
||||
/tests/components/stookwijzer/ @fwestenberg
|
||||
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
|
||||
@@ -1478,8 +1490,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/suez_water/ @ooii @jb101010-2
|
||||
/homeassistant/components/sun/ @Swamp-Ig
|
||||
/tests/components/sun/ @Swamp-Ig
|
||||
/homeassistant/components/sunweg/ @rokam
|
||||
/tests/components/sunweg/ @rokam
|
||||
/homeassistant/components/supla/ @mwegrzynek
|
||||
/homeassistant/components/surepetcare/ @benleb @danielhiversen
|
||||
/tests/components/surepetcare/ @benleb @danielhiversen
|
||||
|
||||
Generated
+1
-1
@@ -31,7 +31,7 @@ RUN \
|
||||
&& go2rtc --version
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv==0.6.8
|
||||
RUN pip3 install uv==0.6.10
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
|
||||
+1
-1
@@ -19,4 +19,4 @@ labels:
|
||||
org.opencontainers.image.authors: The Home Assistant Authors
|
||||
org.opencontainers.image.url: https://www.home-assistant.io/
|
||||
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
|
||||
org.opencontainers.image.licenses: Apache License 2.0
|
||||
org.opencontainers.image.licenses: Apache-2.0
|
||||
|
||||
+12
-17
@@ -53,6 +53,7 @@ from .components import (
|
||||
logbook as logbook_pre_import, # noqa: F401
|
||||
lovelace as lovelace_pre_import, # noqa: F401
|
||||
onboarding as onboarding_pre_import, # noqa: F401
|
||||
person as person_pre_import, # noqa: F401
|
||||
recorder as recorder_import, # noqa: F401 - not named pre_import since it has requirements
|
||||
repairs as repairs_pre_import, # noqa: F401
|
||||
search as search_pre_import, # noqa: F401
|
||||
@@ -859,8 +860,14 @@ async def _async_set_up_integrations(
|
||||
integrations, all_integrations = await _async_resolve_domains_and_preload(
|
||||
hass, config
|
||||
)
|
||||
all_domains = set(all_integrations)
|
||||
domains = set(integrations)
|
||||
# Detect all cycles
|
||||
integrations_after_dependencies = (
|
||||
await loader.resolve_integrations_after_dependencies(
|
||||
hass, all_integrations.values(), set(all_integrations)
|
||||
)
|
||||
)
|
||||
all_domains = set(integrations_after_dependencies)
|
||||
domains = set(integrations) & all_domains
|
||||
|
||||
_LOGGER.info(
|
||||
"Domains to be set up: %s | %s",
|
||||
@@ -868,6 +875,8 @@ async def _async_set_up_integrations(
|
||||
all_domains - domains,
|
||||
)
|
||||
|
||||
async_set_domains_to_be_loaded(hass, all_domains)
|
||||
|
||||
# Initialize recorder
|
||||
if "recorder" in all_domains:
|
||||
recorder.async_initialize_recorder(hass)
|
||||
@@ -900,24 +909,12 @@ async def _async_set_up_integrations(
|
||||
stage_dep_domains_unfiltered = {
|
||||
dep
|
||||
for domain in stage_domains
|
||||
for dep in all_integrations[domain].all_dependencies
|
||||
for dep in integrations_after_dependencies[domain]
|
||||
if dep not in stage_domains
|
||||
}
|
||||
stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components
|
||||
|
||||
stage_all_domains = stage_domains | stage_dep_domains
|
||||
stage_all_integrations = {
|
||||
domain: all_integrations[domain] for domain in stage_all_domains
|
||||
}
|
||||
# Detect all cycles
|
||||
stage_integrations_after_dependencies = (
|
||||
await loader.resolve_integrations_after_dependencies(
|
||||
hass, stage_all_integrations.values(), stage_all_domains
|
||||
)
|
||||
)
|
||||
stage_all_domains = set(stage_integrations_after_dependencies)
|
||||
stage_domains &= stage_all_domains
|
||||
stage_dep_domains &= stage_all_domains
|
||||
|
||||
_LOGGER.info(
|
||||
"Setting up stage %s: %s | %s\nDependencies: %s | %s",
|
||||
@@ -928,8 +925,6 @@ async def _async_set_up_integrations(
|
||||
stage_dep_domains_unfiltered - stage_dep_domains,
|
||||
)
|
||||
|
||||
async_set_domains_to_be_loaded(hass, stage_all_domains)
|
||||
|
||||
if timeout is None:
|
||||
await _async_setup_multi_components(hass, stage_all_domains, config)
|
||||
continue
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "bosch",
|
||||
"name": "Bosch",
|
||||
"integrations": ["bosch_alarm", "bosch_shc", "home_connect"]
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "eve",
|
||||
"name": "Eve",
|
||||
"iot_standards": ["matter"]
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"domain": "motionblinds",
|
||||
"name": "Motionblinds",
|
||||
"integrations": ["motion_blinds", "motionblinds_ble"]
|
||||
"integrations": ["motion_blinds", "motionblinds_ble"],
|
||||
"iot_standards": ["matter"]
|
||||
}
|
||||
|
||||
@@ -72,10 +72,10 @@
|
||||
"level": {
|
||||
"name": "Level",
|
||||
"state": {
|
||||
"high": "High",
|
||||
"low": "Low",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"moderate": "Moderate",
|
||||
"very_high": "Very high"
|
||||
"very_high": "[%key:common::state::very_high%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -89,10 +89,10 @@
|
||||
"level": {
|
||||
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
|
||||
"state": {
|
||||
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
|
||||
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
|
||||
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
|
||||
"very_high": "[%key:common::state::very_high%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -123,10 +123,10 @@
|
||||
"level": {
|
||||
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
|
||||
"state": {
|
||||
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
|
||||
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
|
||||
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
|
||||
"very_high": "[%key:common::state::very_high%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -167,10 +167,10 @@
|
||||
"level": {
|
||||
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
|
||||
"state": {
|
||||
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
|
||||
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
|
||||
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
|
||||
"very_high": "[%key:common::state::very_high%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -181,10 +181,10 @@
|
||||
"level": {
|
||||
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
|
||||
"state": {
|
||||
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
|
||||
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
|
||||
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
|
||||
"very_high": "[%key:common::state::very_high%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -195,10 +195,10 @@
|
||||
"level": {
|
||||
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
|
||||
"state": {
|
||||
"high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
|
||||
"low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
|
||||
"high": "[%key:common::state::high%]",
|
||||
"low": "[%key:common::state::low%]",
|
||||
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
|
||||
"very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
|
||||
"very_high": "[%key:common::state::very_high%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -68,8 +68,8 @@
|
||||
"led_bar_mode": {
|
||||
"name": "LED bar mode",
|
||||
"state": {
|
||||
"off": "Off",
|
||||
"co2": "Carbon dioxide",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
|
||||
"pm": "Particulate matter"
|
||||
}
|
||||
},
|
||||
@@ -143,8 +143,8 @@
|
||||
"led_bar_mode": {
|
||||
"name": "[%key:component::airgradient::entity::select::led_bar_mode::name%]",
|
||||
"state": {
|
||||
"off": "[%key:component::airgradient::entity::select::led_bar_mode::state::off%]",
|
||||
"co2": "[%key:component::airgradient::entity::select::led_bar_mode::state::co2%]",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"co2": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
|
||||
"pm": "[%key:component::airgradient::entity::select::led_bar_mode::state::pm%]"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -8,7 +8,7 @@ from aiohttp import ClientSession
|
||||
from aiohttp.client_exceptions import ClientConnectorError
|
||||
from pyairnow import WebServiceAPI
|
||||
from pyairnow.conv import aqi_to_concentration
|
||||
from pyairnow.errors import AirNowError
|
||||
from pyairnow.errors import AirNowError, InvalidJsonError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -79,7 +79,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
distance=self.distance,
|
||||
)
|
||||
|
||||
except (AirNowError, ClientConnectorError) as error:
|
||||
except (AirNowError, ClientConnectorError, InvalidJsonError) as error:
|
||||
raise UpdateFailed(error) from error
|
||||
|
||||
if not obs:
|
||||
|
||||
@@ -102,7 +102,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
device = await self._get_device_data(discovery_info)
|
||||
except AirthingsDeviceUpdateError:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except Exception: # noqa: BLE001
|
||||
except Exception:
|
||||
_LOGGER.exception("Unknown error occurred")
|
||||
return self.async_abort(reason="unknown")
|
||||
|
||||
name = get_name(device)
|
||||
@@ -160,7 +161,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
device = await self._get_device_data(discovery_info)
|
||||
except AirthingsDeviceUpdateError:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except Exception: # noqa: BLE001
|
||||
except Exception:
|
||||
_LOGGER.exception("Unknown error occurred")
|
||||
return self.async_abort(reason="unknown")
|
||||
name = get_name(device)
|
||||
self._discovered_devices[address] = Discovery(name, discovery_info, device)
|
||||
|
||||
@@ -32,7 +32,8 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
client = Airtouch5SimpleClient(user_input[CONF_HOST])
|
||||
try:
|
||||
await client.test_connection()
|
||||
except Exception: # noqa: BLE001
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors = {"base": "cannot_connect"}
|
||||
else:
|
||||
await self.async_set_unique_id(user_input[CONF_HOST])
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"config": {
|
||||
"step": {
|
||||
"geography_by_coords": {
|
||||
"title": "Configure a Geography",
|
||||
"title": "Configure a geography",
|
||||
"description": "Use the AirVisual cloud API to monitor a latitude/longitude.",
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
@@ -16,8 +16,8 @@
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"city": "City",
|
||||
"country": "Country",
|
||||
"state": "State"
|
||||
"state": "State",
|
||||
"country": "[%key:common::config_flow::data::country%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
@@ -56,12 +56,12 @@
|
||||
"sensor": {
|
||||
"pollutant_label": {
|
||||
"state": {
|
||||
"co": "Carbon Monoxide",
|
||||
"n2": "Nitrogen Dioxide",
|
||||
"o3": "Ozone",
|
||||
"p1": "PM10",
|
||||
"p2": "PM2.5",
|
||||
"s2": "Sulfur Dioxide"
|
||||
"co": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
|
||||
"n2": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
|
||||
"o3": "[%key:component::sensor::entity_component::ozone::name%]",
|
||||
"p1": "[%key:component::sensor::entity_component::pm10::name%]",
|
||||
"p2": "[%key:component::sensor::entity_component::pm25::name%]",
|
||||
"s2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
|
||||
}
|
||||
},
|
||||
"pollutant_level": {
|
||||
|
||||
@@ -11,5 +11,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==0.9.9"]
|
||||
"requirements": ["aioairzone==1.0.0"]
|
||||
}
|
||||
|
||||
@@ -9,6 +9,8 @@ from aioairzone.const import (
|
||||
AZD_HUMIDITY,
|
||||
AZD_TEMP,
|
||||
AZD_TEMP_UNIT,
|
||||
AZD_THERMOSTAT_BATTERY,
|
||||
AZD_THERMOSTAT_SIGNAL,
|
||||
AZD_WEBSERVER,
|
||||
AZD_WIFI_RSSI,
|
||||
AZD_ZONES,
|
||||
@@ -73,6 +75,20 @@ ZONE_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
key=AZD_THERMOSTAT_BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
key=AZD_THERMOSTAT_SIGNAL,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="thermostat_signal",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -76,6 +76,9 @@
|
||||
"sensor": {
|
||||
"rssi": {
|
||||
"name": "RSSI"
|
||||
},
|
||||
"thermostat_signal": {
|
||||
"name": "Signal strength"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aioairzone_cloud"],
|
||||
"requirements": ["aioairzone-cloud==0.6.10"]
|
||||
"requirements": ["aioairzone-cloud==0.6.12"]
|
||||
}
|
||||
|
||||
@@ -32,9 +32,9 @@
|
||||
"air_quality": {
|
||||
"name": "Air Quality mode",
|
||||
"state": {
|
||||
"off": "Off",
|
||||
"on": "On",
|
||||
"auto": "Auto"
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]",
|
||||
"auto": "[%key:common::state::auto%]"
|
||||
}
|
||||
},
|
||||
"modes": {
|
||||
|
||||
@@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability):
|
||||
# Fan preset_mode
|
||||
if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}":
|
||||
mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None)
|
||||
if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None):
|
||||
if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()):
|
||||
return f"{fan.ATTR_PRESET_MODE}.{mode}"
|
||||
|
||||
# Humidifier mode
|
||||
|
||||
@@ -719,7 +719,7 @@ class LockCapabilities(AlexaEntity):
|
||||
yield Alexa(self.entity)
|
||||
|
||||
|
||||
@ENTITY_ADAPTERS.register(media_player.const.DOMAIN)
|
||||
@ENTITY_ADAPTERS.register(media_player.DOMAIN)
|
||||
class MediaPlayerCapabilities(AlexaEntity):
|
||||
"""Class to represent MediaPlayer capabilities."""
|
||||
|
||||
@@ -757,9 +757,7 @@ class MediaPlayerCapabilities(AlexaEntity):
|
||||
|
||||
if supported & media_player.MediaPlayerEntityFeature.SELECT_SOURCE:
|
||||
inputs = AlexaInputController.get_valid_inputs(
|
||||
self.entity.attributes.get(
|
||||
media_player.const.ATTR_INPUT_SOURCE_LIST, []
|
||||
)
|
||||
self.entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST, [])
|
||||
)
|
||||
if len(inputs) > 0:
|
||||
yield AlexaInputController(self.entity)
|
||||
@@ -776,8 +774,7 @@ class MediaPlayerCapabilities(AlexaEntity):
|
||||
and domain != "denonavr"
|
||||
):
|
||||
inputs = AlexaEqualizerController.get_valid_inputs(
|
||||
self.entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
|
||||
or []
|
||||
self.entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST) or []
|
||||
)
|
||||
if len(inputs) > 0:
|
||||
yield AlexaEqualizerController(self.entity)
|
||||
|
||||
@@ -566,7 +566,7 @@ async def async_api_set_volume(
|
||||
|
||||
data: dict[str, Any] = {
|
||||
ATTR_ENTITY_ID: entity.entity_id,
|
||||
media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
|
||||
media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
|
||||
}
|
||||
|
||||
await hass.services.async_call(
|
||||
@@ -589,7 +589,7 @@ async def async_api_select_input(
|
||||
|
||||
# Attempt to map the ALL UPPERCASE payload name to a source.
|
||||
# Strips trailing 1 to match single input devices.
|
||||
source_list = entity.attributes.get(media_player.const.ATTR_INPUT_SOURCE_LIST) or []
|
||||
source_list = entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or []
|
||||
for source in source_list:
|
||||
formatted_source = (
|
||||
source.lower().replace("-", "").replace("_", "").replace(" ", "")
|
||||
@@ -611,7 +611,7 @@ async def async_api_select_input(
|
||||
|
||||
data: dict[str, Any] = {
|
||||
ATTR_ENTITY_ID: entity.entity_id,
|
||||
media_player.const.ATTR_INPUT_SOURCE: media_input,
|
||||
media_player.ATTR_INPUT_SOURCE: media_input,
|
||||
}
|
||||
|
||||
await hass.services.async_call(
|
||||
@@ -636,7 +636,7 @@ async def async_api_adjust_volume(
|
||||
volume_delta = int(directive.payload["volume"])
|
||||
|
||||
entity = directive.entity
|
||||
current_level = entity.attributes[media_player.const.ATTR_MEDIA_VOLUME_LEVEL]
|
||||
current_level = entity.attributes[media_player.ATTR_MEDIA_VOLUME_LEVEL]
|
||||
|
||||
# read current state
|
||||
try:
|
||||
@@ -648,7 +648,7 @@ async def async_api_adjust_volume(
|
||||
|
||||
data: dict[str, Any] = {
|
||||
ATTR_ENTITY_ID: entity.entity_id,
|
||||
media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
|
||||
media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
|
||||
}
|
||||
|
||||
await hass.services.async_call(
|
||||
@@ -709,7 +709,7 @@ async def async_api_set_mute(
|
||||
entity = directive.entity
|
||||
data: dict[str, Any] = {
|
||||
ATTR_ENTITY_ID: entity.entity_id,
|
||||
media_player.const.ATTR_MEDIA_VOLUME_MUTED: mute,
|
||||
media_player.ATTR_MEDIA_VOLUME_MUTED: mute,
|
||||
}
|
||||
|
||||
await hass.services.async_call(
|
||||
@@ -1708,15 +1708,13 @@ async def async_api_changechannel(
|
||||
|
||||
data: dict[str, Any] = {
|
||||
ATTR_ENTITY_ID: entity.entity_id,
|
||||
media_player.const.ATTR_MEDIA_CONTENT_ID: channel,
|
||||
media_player.const.ATTR_MEDIA_CONTENT_TYPE: (
|
||||
media_player.const.MEDIA_TYPE_CHANNEL
|
||||
),
|
||||
media_player.ATTR_MEDIA_CONTENT_ID: channel,
|
||||
media_player.ATTR_MEDIA_CONTENT_TYPE: (media_player.MediaType.CHANNEL),
|
||||
}
|
||||
|
||||
await hass.services.async_call(
|
||||
entity.domain,
|
||||
media_player.const.SERVICE_PLAY_MEDIA,
|
||||
media_player.SERVICE_PLAY_MEDIA,
|
||||
data,
|
||||
blocking=False,
|
||||
context=context,
|
||||
@@ -1825,13 +1823,13 @@ async def async_api_set_eq_mode(
|
||||
context: ha.Context,
|
||||
) -> AlexaResponse:
|
||||
"""Process a SetMode request for EqualizerController."""
|
||||
mode = directive.payload["mode"]
|
||||
mode: str = directive.payload["mode"]
|
||||
entity = directive.entity
|
||||
data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}
|
||||
|
||||
sound_mode_list = entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
|
||||
sound_mode_list = entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST)
|
||||
if sound_mode_list and mode.lower() in sound_mode_list:
|
||||
data[media_player.const.ATTR_SOUND_MODE] = mode.lower()
|
||||
data[media_player.ATTR_SOUND_MODE] = mode.lower()
|
||||
else:
|
||||
msg = f"failed to map sound mode {mode} to a mode on {entity.entity_id}"
|
||||
raise AlexaInvalidValueError(msg)
|
||||
|
||||
@@ -3,10 +3,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import timeout
|
||||
from collections.abc import Mapping
|
||||
from http import HTTPStatus
|
||||
import json
|
||||
import logging
|
||||
from types import MappingProxyType
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
from uuid import uuid4
|
||||
|
||||
@@ -260,10 +260,10 @@ async def async_enable_proactive_mode(
|
||||
def extra_significant_check(
|
||||
hass: HomeAssistant,
|
||||
old_state: str,
|
||||
old_attrs: dict[Any, Any] | MappingProxyType[Any, Any],
|
||||
old_attrs: Mapping[Any, Any],
|
||||
old_extra_arg: Any,
|
||||
new_state: str,
|
||||
new_attrs: dict[str, Any] | MappingProxyType[Any, Any],
|
||||
new_attrs: Mapping[Any, Any],
|
||||
new_extra_arg: Any,
|
||||
) -> bool:
|
||||
"""Check if the serialized data has changed."""
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["boto3", "botocore", "s3transfer"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["boto3==1.34.131"]
|
||||
"requirements": ["boto3==1.37.1"]
|
||||
}
|
||||
|
||||
@@ -240,6 +240,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
suggested_display_precision=0,
|
||||
entity_registry_enabled_default=False,
|
||||
device_class=SensorDeviceClass.WIND_DIRECTION,
|
||||
state_class=SensorStateClass.MEASUREMENT_ANGLE,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_WINDGUSTMPH,
|
||||
|
||||
@@ -609,6 +609,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="wind_direction",
|
||||
native_unit_of_measurement=DEGREE,
|
||||
device_class=SensorDeviceClass.WIND_DIRECTION,
|
||||
state_class=SensorStateClass.MEASUREMENT_ANGLE,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key=TYPE_WINDDIR_AVG10M,
|
||||
|
||||
@@ -3,12 +3,12 @@
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"tracked_addons": "Addons",
|
||||
"tracked_addons": "Add-ons",
|
||||
"tracked_integrations": "Integrations",
|
||||
"tracked_custom_integrations": "Custom integrations"
|
||||
},
|
||||
"data_description": {
|
||||
"tracked_addons": "Select the addons you want to track",
|
||||
"tracked_addons": "Select the add-ons you want to track",
|
||||
"tracked_integrations": "Select the integrations you want to track",
|
||||
"tracked_custom_integrations": "Select the custom integrations you want to track"
|
||||
}
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/android_ip_webcam",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["pydroid-ipcam==2.0.0"]
|
||||
"requirements": ["pydroid-ipcam==3.0.0"]
|
||||
}
|
||||
|
||||
@@ -73,7 +73,7 @@ class AndroidTVRemoteBaseEntity(Entity):
|
||||
self._api.send_key_command(key_code, direction)
|
||||
except ConnectionClosed as exc:
|
||||
raise HomeAssistantError(
|
||||
"Connection to Android TV device is closed"
|
||||
translation_domain=DOMAIN, translation_key="connection_closed"
|
||||
) from exc
|
||||
|
||||
def _send_launch_app_command(self, app_link: str) -> None:
|
||||
@@ -85,5 +85,5 @@ class AndroidTVRemoteBaseEntity(Entity):
|
||||
self._api.send_launch_app_command(app_link)
|
||||
except ConnectionClosed as exc:
|
||||
raise HomeAssistantError(
|
||||
"Connection to Android TV device is closed"
|
||||
translation_domain=DOMAIN, translation_key="connection_closed"
|
||||
) from exc
|
||||
|
||||
@@ -21,7 +21,7 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AndroidTVRemoteConfigEntry
|
||||
from .const import CONF_APP_ICON, CONF_APP_NAME
|
||||
from .const import CONF_APP_ICON, CONF_APP_NAME, DOMAIN
|
||||
from .entity import AndroidTVRemoteBaseEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -233,5 +233,5 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
|
||||
await asyncio.sleep(delay_secs)
|
||||
except ConnectionClosed as exc:
|
||||
raise HomeAssistantError(
|
||||
"Connection to Android TV device is closed"
|
||||
translation_domain=DOMAIN, translation_key="connection_closed"
|
||||
) from exc
|
||||
|
||||
@@ -54,5 +54,10 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"connection_closed": {
|
||||
"message": "Connection to the Android TV device is closed"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from anova_wifi import AnovaApi, InvalidLogin
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -11,8 +13,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
class AnovaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Sets up a config flow for Anova."""
|
||||
|
||||
VERSION = 1
|
||||
@@ -35,7 +39,8 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN):
|
||||
await api.authenticate()
|
||||
except InvalidLogin:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception: # noqa: BLE001
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_create_entry(
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from functools import partial
|
||||
import logging
|
||||
from types import MappingProxyType
|
||||
@@ -52,7 +53,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
|
||||
RECOMMENDED_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
|
||||
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
|
||||
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
|
||||
}
|
||||
|
||||
@@ -134,9 +135,8 @@ class AnthropicOptionsFlow(OptionsFlow):
|
||||
|
||||
if user_input is not None:
|
||||
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
|
||||
if user_input[CONF_LLM_HASS_API] == "none":
|
||||
user_input.pop(CONF_LLM_HASS_API)
|
||||
|
||||
if not user_input.get(CONF_LLM_HASS_API):
|
||||
user_input.pop(CONF_LLM_HASS_API, None)
|
||||
if user_input.get(
|
||||
CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
|
||||
) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
|
||||
@@ -151,12 +151,16 @@ class AnthropicOptionsFlow(OptionsFlow):
|
||||
options = {
|
||||
CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
|
||||
CONF_PROMPT: user_input[CONF_PROMPT],
|
||||
CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
|
||||
CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
|
||||
}
|
||||
|
||||
suggested_values = options.copy()
|
||||
if not suggested_values.get(CONF_PROMPT):
|
||||
suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
|
||||
if (
|
||||
suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
|
||||
) and isinstance(suggested_llm_apis, str):
|
||||
suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]
|
||||
|
||||
schema = self.add_suggested_values_to_schema(
|
||||
vol.Schema(anthropic_config_option_schema(self.hass, options)),
|
||||
@@ -172,28 +176,22 @@ class AnthropicOptionsFlow(OptionsFlow):
|
||||
|
||||
def anthropic_config_option_schema(
|
||||
hass: HomeAssistant,
|
||||
options: dict[str, Any] | MappingProxyType[str, Any],
|
||||
options: Mapping[str, Any],
|
||||
) -> dict:
|
||||
"""Return a schema for Anthropic completion options."""
|
||||
hass_apis: list[SelectOptionDict] = [
|
||||
SelectOptionDict(
|
||||
label="No control",
|
||||
value="none",
|
||||
)
|
||||
]
|
||||
hass_apis.extend(
|
||||
SelectOptionDict(
|
||||
label=api.name,
|
||||
value=api.id,
|
||||
)
|
||||
for api in llm.async_get_apis(hass)
|
||||
)
|
||||
]
|
||||
|
||||
schema = {
|
||||
vol.Optional(CONF_PROMPT): TemplateSelector(),
|
||||
vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector(
|
||||
SelectSelectorConfig(options=hass_apis)
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_LLM_HASS_API,
|
||||
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
|
||||
vol.Required(
|
||||
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
|
||||
): bool,
|
||||
|
||||
@@ -9,11 +9,13 @@ from anthropic import AsyncStream
|
||||
from anthropic._types import NOT_GIVEN
|
||||
from anthropic.types import (
|
||||
InputJSONDelta,
|
||||
MessageDeltaUsage,
|
||||
MessageParam,
|
||||
MessageStreamEvent,
|
||||
RawContentBlockDeltaEvent,
|
||||
RawContentBlockStartEvent,
|
||||
RawContentBlockStopEvent,
|
||||
RawMessageDeltaEvent,
|
||||
RawMessageStartEvent,
|
||||
RawMessageStopEvent,
|
||||
RedactedThinkingBlock,
|
||||
@@ -31,6 +33,7 @@ from anthropic.types import (
|
||||
ToolResultBlockParam,
|
||||
ToolUseBlock,
|
||||
ToolUseBlockParam,
|
||||
Usage,
|
||||
)
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
@@ -162,7 +165,8 @@ def _convert_content(
|
||||
return messages
|
||||
|
||||
|
||||
async def _transform_stream(
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
chat_log: conversation.ChatLog,
|
||||
result: AsyncStream[MessageStreamEvent],
|
||||
messages: list[MessageParam],
|
||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||
@@ -207,6 +211,7 @@ async def _transform_stream(
|
||||
| None
|
||||
) = None
|
||||
current_tool_args: str
|
||||
input_usage: Usage | None = None
|
||||
|
||||
async for response in result:
|
||||
LOGGER.debug("Received response: %s", response)
|
||||
@@ -215,6 +220,7 @@ async def _transform_stream(
|
||||
if response.message.role != "assistant":
|
||||
raise ValueError("Unexpected message role")
|
||||
current_message = MessageParam(role=response.message.role, content=[])
|
||||
input_usage = response.message.usage
|
||||
elif isinstance(response, RawContentBlockStartEvent):
|
||||
if isinstance(response.content_block, ToolUseBlock):
|
||||
current_block = ToolUseBlockParam(
|
||||
@@ -265,32 +271,54 @@ async def _transform_stream(
|
||||
if current_block is None:
|
||||
raise ValueError("Unexpected stop event without a current block")
|
||||
if current_block["type"] == "tool_use":
|
||||
tool_block = cast(ToolUseBlockParam, current_block)
|
||||
tool_args = json.loads(current_tool_args)
|
||||
tool_block["input"] = tool_args
|
||||
# tool block
|
||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||
current_block["input"] = tool_args
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=tool_block["id"],
|
||||
tool_name=tool_block["name"],
|
||||
id=current_block["id"],
|
||||
tool_name=current_block["name"],
|
||||
tool_args=tool_args,
|
||||
)
|
||||
]
|
||||
}
|
||||
elif current_block["type"] == "thinking":
|
||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
||||
LOGGER.debug("Thinking: %s", thinking_block["thinking"])
|
||||
# thinking block
|
||||
LOGGER.debug("Thinking: %s", current_block["thinking"])
|
||||
|
||||
if current_message is None:
|
||||
raise ValueError("Unexpected stop event without a current message")
|
||||
current_message["content"].append(current_block) # type: ignore[union-attr]
|
||||
current_block = None
|
||||
elif isinstance(response, RawMessageDeltaEvent):
|
||||
if (usage := response.usage) is not None:
|
||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||
elif isinstance(response, RawMessageStopEvent):
|
||||
if current_message is not None:
|
||||
messages.append(current_message)
|
||||
current_message = None
|
||||
|
||||
|
||||
def _create_token_stats(
|
||||
input_usage: Usage | None, response_usage: MessageDeltaUsage
|
||||
) -> dict[str, Any]:
|
||||
"""Create token stats for conversation agent tracing."""
|
||||
input_tokens = 0
|
||||
cached_input_tokens = 0
|
||||
if input_usage:
|
||||
input_tokens = input_usage.input_tokens
|
||||
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
|
||||
output_tokens = response_usage.output_tokens
|
||||
return {
|
||||
"stats": {
|
||||
"input_tokens": input_tokens,
|
||||
"cached_input_tokens": cached_input_tokens,
|
||||
"output_tokens": output_tokens,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class AnthropicConversationEntity(
|
||||
conversation.ConversationEntity, conversation.AbstractConversationAgent
|
||||
):
|
||||
@@ -393,7 +421,8 @@ class AnthropicConversationEntity(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
user_input.agent_id, _transform_stream(stream, messages)
|
||||
user_input.agent_id,
|
||||
_transform_stream(chat_log, stream, messages),
|
||||
)
|
||||
if not isinstance(content, conversation.AssistantContent)
|
||||
]
|
||||
|
||||
@@ -53,10 +53,8 @@ class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity):
|
||||
"""Initialize the APCUPSd binary device."""
|
||||
super().__init__(coordinator, context=description.key.upper())
|
||||
|
||||
# Set up unique id and device info if serial number is available.
|
||||
if (serial_no := coordinator.data.serial_no) is not None:
|
||||
self._attr_unique_id = f"{serial_no}_{description.key}"
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
|
||||
self._attr_device_info = coordinator.device_info
|
||||
|
||||
@property
|
||||
|
||||
@@ -85,11 +85,16 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
|
||||
self._host = host
|
||||
self._port = port
|
||||
|
||||
@property
|
||||
def unique_device_id(self) -> str:
|
||||
"""Return a unique ID of the device, which is the serial number (if available) or the config entry ID."""
|
||||
return self.data.serial_no or self.config_entry.entry_id
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return the DeviceInfo of this APC UPS, if serial number is available."""
|
||||
return DeviceInfo(
|
||||
identifiers={(DOMAIN, self.data.serial_no or self.config_entry.entry_id)},
|
||||
identifiers={(DOMAIN, self.unique_device_id)},
|
||||
model=self.data.model,
|
||||
manufacturer="APC",
|
||||
name=self.data.name or "APC UPS",
|
||||
@@ -108,4 +113,7 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
|
||||
data = await aioapcaccess.request_status(self._host, self._port)
|
||||
return APCUPSdData(data)
|
||||
except (OSError, asyncio.IncompleteReadError) as error:
|
||||
raise UpdateFailed(error) from error
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
) from error
|
||||
|
||||
@@ -458,11 +458,8 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator=coordinator, context=description.key.upper())
|
||||
|
||||
# Set up unique id and device info if serial number is available.
|
||||
if (serial_no := coordinator.data.serial_no) is not None:
|
||||
self._attr_unique_id = f"{serial_no}_{description.key}"
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
|
||||
self._attr_device_info = coordinator.device_info
|
||||
|
||||
# Initial update of attributes.
|
||||
|
||||
@@ -93,7 +93,7 @@
|
||||
"name": "Internal temperature"
|
||||
},
|
||||
"last_self_test": {
|
||||
"name": "Last self test"
|
||||
"name": "Last self-test"
|
||||
},
|
||||
"last_transfer": {
|
||||
"name": "Last transfer"
|
||||
@@ -177,7 +177,7 @@
|
||||
"name": "Restore requirement"
|
||||
},
|
||||
"self_test_result": {
|
||||
"name": "Self test result"
|
||||
"name": "Self-test result"
|
||||
},
|
||||
"sensitivity": {
|
||||
"name": "Sensitivity"
|
||||
@@ -195,7 +195,7 @@
|
||||
"name": "Status"
|
||||
},
|
||||
"self_test_interval": {
|
||||
"name": "Self test interval"
|
||||
"name": "Self-test interval"
|
||||
},
|
||||
"time_left": {
|
||||
"name": "Time left"
|
||||
@@ -219,5 +219,10 @@
|
||||
"name": "Transfer to battery"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"cannot_connect": {
|
||||
"message": "Cannot connect to APC UPS Daemon."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ import voluptuous as vol
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_IGNORE,
|
||||
SOURCE_REAUTH,
|
||||
SOURCE_ZEROCONF,
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
@@ -381,7 +382,9 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_IDENTIFIERS: list(combined_identifiers),
|
||||
},
|
||||
)
|
||||
if entry.source != SOURCE_IGNORE:
|
||||
# Don't reload ignored entries or in the middle of reauth,
|
||||
# e.g. if the user is entering a new PIN
|
||||
if entry.source != SOURCE_IGNORE and self.source != SOURCE_REAUTH:
|
||||
self.hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
if not allow_exist:
|
||||
raise DeviceAlreadyConfigured
|
||||
|
||||
@@ -120,6 +120,7 @@ class AppleTvMediaPlayer(
|
||||
"""Initialize the Apple TV media player."""
|
||||
super().__init__(name, identifier, manager)
|
||||
self._playing: Playing | None = None
|
||||
self._playing_last_updated: datetime | None = None
|
||||
self._app_list: dict[str, str] = {}
|
||||
|
||||
@callback
|
||||
@@ -209,6 +210,7 @@ class AppleTvMediaPlayer(
|
||||
This is a callback function from pyatv.interface.PushListener.
|
||||
"""
|
||||
self._playing = playstatus
|
||||
self._playing_last_updated = dt_util.utcnow()
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
@@ -316,7 +318,7 @@ class AppleTvMediaPlayer(
|
||||
def media_position_updated_at(self) -> datetime | None:
|
||||
"""Last valid time of media position."""
|
||||
if self.state in {MediaPlayerState.PLAYING, MediaPlayerState.PAUSED}:
|
||||
return dt_util.utcnow()
|
||||
return self._playing_last_updated
|
||||
return None
|
||||
|
||||
async def async_play_media(
|
||||
|
||||
@@ -43,6 +43,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
|
||||
|
||||
config_entry: ApSystemsConfigEntry
|
||||
device_version: str
|
||||
battery_system: bool
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -68,6 +69,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
|
||||
self.api.max_power = device_info.maxPower
|
||||
self.api.min_power = device_info.minPower
|
||||
self.device_version = device_info.devVer
|
||||
self.battery_system = device_info.isBatterySystem
|
||||
|
||||
async def _async_update_data(self) -> ApSystemsSensorData:
|
||||
try:
|
||||
|
||||
@@ -6,5 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/apsystems",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["apsystems-ez1==2.4.0"]
|
||||
"loggers": ["APsystemsEZ1"],
|
||||
"requirements": ["apsystems-ez1==2.6.0"]
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"off_grid_status": {
|
||||
"name": "Off grid status"
|
||||
"name": "Off-grid status"
|
||||
},
|
||||
"dc_1_short_circuit_error_status": {
|
||||
"name": "DC 1 short circuit error status"
|
||||
|
||||
@@ -36,6 +36,8 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity):
|
||||
super().__init__(data)
|
||||
self._api = data.coordinator.api
|
||||
self._attr_unique_id = f"{data.device_id}_inverter_status"
|
||||
if data.coordinator.battery_system:
|
||||
self._attr_available = False
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update switch status and availability."""
|
||||
|
||||
@@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "cannot_connect"
|
||||
except AuthenticationFailed:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception: # pylint: disable=broad-except
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
|
||||
@@ -36,9 +36,9 @@
|
||||
"wi_fi_strength": {
|
||||
"name": "Wi-Fi strength",
|
||||
"state": {
|
||||
"low": "Low",
|
||||
"medium": "Medium",
|
||||
"high": "High"
|
||||
"low": "[%key:common::state::low%]",
|
||||
"medium": "[%key:common::state::medium%]",
|
||||
"high": "[%key:common::state::high%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
"sensor": {
|
||||
"threshold": {
|
||||
"state": {
|
||||
"error": "Error",
|
||||
"error": "[%key:common::state::error%]",
|
||||
"green": "Green",
|
||||
"yellow": "Yellow",
|
||||
"red": "Red"
|
||||
|
||||
@@ -6,7 +6,11 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components import mqtt
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
@@ -98,6 +102,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] |
|
||||
DEGREE,
|
||||
"mdi:compass",
|
||||
device_class=SensorDeviceClass.WIND_DIRECTION,
|
||||
state_class=SensorStateClass.MEASUREMENT_ANGLE,
|
||||
),
|
||||
]
|
||||
return None
|
||||
@@ -178,6 +183,7 @@ class ArwnSensor(SensorEntity):
|
||||
units: str,
|
||||
icon: str | None = None,
|
||||
device_class: SensorDeviceClass | None = None,
|
||||
state_class: SensorStateClass | None = None,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.entity_id = _slug(name)
|
||||
@@ -188,6 +194,7 @@ class ArwnSensor(SensorEntity):
|
||||
self._attr_native_unit_of_measurement = units
|
||||
self._attr_icon = icon
|
||||
self._attr_device_class = device_class
|
||||
self._attr_state_class = state_class
|
||||
|
||||
def set_event(self, event: dict[str, Any]) -> None:
|
||||
"""Update the sensor with the most recent event."""
|
||||
|
||||
@@ -125,7 +125,7 @@ SAVE_DELAY = 10
|
||||
@callback
|
||||
def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
|
||||
"""Filter out intents that are not local fallback."""
|
||||
return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND)
|
||||
return result.intent.name in (intent.INTENT_GET_STATE)
|
||||
|
||||
|
||||
@callback
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
"""Base class for assist satellite entities."""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.http import StaticPathConfig
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
@@ -15,6 +17,8 @@ from .const import (
|
||||
CONNECTION_TEST_DATA,
|
||||
DATA_COMPONENT,
|
||||
DOMAIN,
|
||||
PREANNOUNCE_FILENAME,
|
||||
PREANNOUNCE_URL,
|
||||
AssistSatelliteEntityFeature,
|
||||
)
|
||||
from .entity import (
|
||||
@@ -56,6 +60,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
{
|
||||
vol.Optional("message"): str,
|
||||
vol.Optional("media_id"): str,
|
||||
vol.Optional("preannounce"): bool,
|
||||
vol.Optional("preannounce_media_id"): str,
|
||||
}
|
||||
),
|
||||
cv.has_at_least_one_key("message", "media_id"),
|
||||
@@ -70,6 +76,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
{
|
||||
vol.Optional("start_message"): str,
|
||||
vol.Optional("start_media_id"): str,
|
||||
vol.Optional("preannounce"): bool,
|
||||
vol.Optional("preannounce_media_id"): str,
|
||||
vol.Optional("extra_system_prompt"): str,
|
||||
}
|
||||
),
|
||||
@@ -82,6 +90,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async_register_websocket_api(hass)
|
||||
hass.http.register_view(ConnectionTestView())
|
||||
|
||||
# Default preannounce sound
|
||||
await hass.http.async_register_static_paths(
|
||||
[
|
||||
StaticPathConfig(
|
||||
PREANNOUNCE_URL, str(Path(__file__).parent / PREANNOUNCE_FILENAME)
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -20,6 +20,9 @@ CONNECTION_TEST_DATA: HassKey[dict[str, asyncio.Event]] = HassKey(
|
||||
f"{DOMAIN}_connection_tests"
|
||||
)
|
||||
|
||||
PREANNOUNCE_FILENAME = "preannounce.mp3"
|
||||
PREANNOUNCE_URL = f"/api/assist_satellite/static/{PREANNOUNCE_FILENAME}"
|
||||
|
||||
|
||||
class AssistSatelliteEntityFeature(IntFlag):
|
||||
"""Supported features of Assist satellite entity."""
|
||||
|
||||
@@ -28,7 +28,7 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import chat_session, entity
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
|
||||
from .const import AssistSatelliteEntityFeature
|
||||
from .const import PREANNOUNCE_URL, AssistSatelliteEntityFeature
|
||||
from .errors import AssistSatelliteError, SatelliteBusyError
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -101,6 +101,9 @@ class AssistSatelliteAnnouncement:
|
||||
media_id_source: Literal["url", "media_id", "tts"]
|
||||
"""Source of the media ID."""
|
||||
|
||||
preannounce_media_id: str | None = None
|
||||
"""Media ID to be played before announcement."""
|
||||
|
||||
|
||||
class AssistSatelliteEntity(entity.Entity):
|
||||
"""Entity encapsulating the state and functionality of an Assist satellite."""
|
||||
@@ -177,6 +180,8 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
self,
|
||||
message: str | None = None,
|
||||
media_id: str | None = None,
|
||||
preannounce: bool = True,
|
||||
preannounce_media_id: str = PREANNOUNCE_URL,
|
||||
) -> None:
|
||||
"""Play and show an announcement on the satellite.
|
||||
|
||||
@@ -186,6 +191,9 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
If media_id is provided, it is played directly. It is possible
|
||||
to omit the message and the satellite will not show any text.
|
||||
|
||||
If preannounce is True, a sound is played before the announcement.
|
||||
If preannounce_media_id is provided, it overrides the default sound.
|
||||
|
||||
Calls async_announce with message and media id.
|
||||
"""
|
||||
await self._cancel_running_pipeline()
|
||||
@@ -193,7 +201,11 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
if message is None:
|
||||
message = ""
|
||||
|
||||
announcement = await self._resolve_announcement_media_id(message, media_id)
|
||||
announcement = await self._resolve_announcement_media_id(
|
||||
message,
|
||||
media_id,
|
||||
preannounce_media_id=preannounce_media_id if preannounce else None,
|
||||
)
|
||||
|
||||
if self._is_announcing:
|
||||
raise SatelliteBusyError
|
||||
@@ -220,6 +232,8 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
start_message: str | None = None,
|
||||
start_media_id: str | None = None,
|
||||
extra_system_prompt: str | None = None,
|
||||
preannounce: bool = True,
|
||||
preannounce_media_id: str = PREANNOUNCE_URL,
|
||||
) -> None:
|
||||
"""Start a conversation from the satellite.
|
||||
|
||||
@@ -229,6 +243,9 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
If start_media_id is provided, it is played directly. It is possible
|
||||
to omit the message and the satellite will not show any text.
|
||||
|
||||
If preannounce is True, a sound is played before the start message or media.
|
||||
If preannounce_media_id is provided, it overrides the default sound.
|
||||
|
||||
Calls async_start_conversation.
|
||||
"""
|
||||
await self._cancel_running_pipeline()
|
||||
@@ -244,13 +261,17 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
start_message = ""
|
||||
|
||||
announcement = await self._resolve_announcement_media_id(
|
||||
start_message, start_media_id
|
||||
start_message,
|
||||
start_media_id,
|
||||
preannounce_media_id=preannounce_media_id if preannounce else None,
|
||||
)
|
||||
|
||||
if self._is_announcing:
|
||||
raise SatelliteBusyError
|
||||
|
||||
self._is_announcing = True
|
||||
self._set_state(AssistSatelliteState.RESPONDING)
|
||||
|
||||
# Provide our start info to the LLM so it understands context of incoming message
|
||||
if extra_system_prompt is not None:
|
||||
self._extra_system_prompt = extra_system_prompt
|
||||
@@ -280,6 +301,7 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
raise
|
||||
finally:
|
||||
self._is_announcing = False
|
||||
self._set_state(AssistSatelliteState.IDLE)
|
||||
|
||||
async def async_start_conversation(
|
||||
self, start_announcement: AssistSatelliteAnnouncement
|
||||
@@ -470,7 +492,10 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
return vad.VadSensitivity.to_seconds(vad_sensitivity)
|
||||
|
||||
async def _resolve_announcement_media_id(
|
||||
self, message: str, media_id: str | None
|
||||
self,
|
||||
message: str,
|
||||
media_id: str | None,
|
||||
preannounce_media_id: str | None = None,
|
||||
) -> AssistSatelliteAnnouncement:
|
||||
"""Resolve the media ID."""
|
||||
media_id_source: Literal["url", "media_id", "tts"] | None = None
|
||||
@@ -478,7 +503,6 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
|
||||
if media_id:
|
||||
original_media_id = media_id
|
||||
|
||||
else:
|
||||
media_id_source = "tts"
|
||||
# Synthesize audio and get URL
|
||||
@@ -530,10 +554,26 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
# Resolve to full URL
|
||||
media_id = async_process_play_media_url(self.hass, media_id)
|
||||
|
||||
# Resolve preannounce media id
|
||||
if preannounce_media_id:
|
||||
if media_source.is_media_source_id(preannounce_media_id):
|
||||
preannounce_media = await media_source.async_resolve_media(
|
||||
self.hass,
|
||||
preannounce_media_id,
|
||||
None,
|
||||
)
|
||||
preannounce_media_id = preannounce_media.url
|
||||
|
||||
# Resolve to full URL
|
||||
preannounce_media_id = async_process_play_media_url(
|
||||
self.hass, preannounce_media_id
|
||||
)
|
||||
|
||||
return AssistSatelliteAnnouncement(
|
||||
message=message,
|
||||
media_id=media_id,
|
||||
original_media_id=original_media_id,
|
||||
tts_token=tts_token,
|
||||
media_id_source=media_id_source,
|
||||
preannounce_media_id=preannounce_media_id,
|
||||
)
|
||||
|
||||
Binary file not shown.
@@ -8,12 +8,22 @@ announce:
|
||||
message:
|
||||
required: false
|
||||
example: "Time to wake up!"
|
||||
default: ""
|
||||
selector:
|
||||
text:
|
||||
media_id:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
preannounce:
|
||||
required: false
|
||||
default: true
|
||||
selector:
|
||||
boolean:
|
||||
preannounce_media_id:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
start_conversation:
|
||||
target:
|
||||
entity:
|
||||
@@ -24,6 +34,7 @@ start_conversation:
|
||||
start_message:
|
||||
required: false
|
||||
example: "You left the lights on in the living room. Turn them off?"
|
||||
default: ""
|
||||
selector:
|
||||
text:
|
||||
start_media_id:
|
||||
@@ -34,3 +45,12 @@ start_conversation:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
preannounce:
|
||||
required: false
|
||||
default: true
|
||||
selector:
|
||||
boolean:
|
||||
preannounce_media_id:
|
||||
required: false
|
||||
selector:
|
||||
text:
|
||||
|
||||
@@ -23,6 +23,14 @@
|
||||
"media_id": {
|
||||
"name": "Media ID",
|
||||
"description": "The media ID to announce instead of using text-to-speech."
|
||||
},
|
||||
"preannounce": {
|
||||
"name": "Preannounce",
|
||||
"description": "Play a sound before the announcement."
|
||||
},
|
||||
"preannounce_media_id": {
|
||||
"name": "Preannounce media ID",
|
||||
"description": "Custom media ID to play before the announcement."
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -41,6 +49,14 @@
|
||||
"extra_system_prompt": {
|
||||
"name": "Extra system prompt",
|
||||
"description": "Provide background information to the AI about the request."
|
||||
},
|
||||
"preannounce": {
|
||||
"name": "Preannounce",
|
||||
"description": "Play a sound before the start message or media."
|
||||
},
|
||||
"preannounce_media_id": {
|
||||
"name": "Preannounce media ID",
|
||||
"description": "Custom media ID to play before the start message or media."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -198,7 +198,8 @@ async def websocket_test_connection(
|
||||
|
||||
hass.async_create_background_task(
|
||||
satellite.async_internal_announce(
|
||||
media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}"
|
||||
media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}",
|
||||
preannounce=False,
|
||||
),
|
||||
f"assist_satellite_connection_test_{msg['entity_id']}",
|
||||
)
|
||||
|
||||
@@ -2,10 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Callable, Mapping
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from types import MappingProxyType
|
||||
from typing import Any
|
||||
|
||||
from pyasuswrt import AsusWrtError
|
||||
@@ -363,7 +362,7 @@ class AsusWrtRouter:
|
||||
"""Add a function to call when router is closed."""
|
||||
self._on_close.append(func)
|
||||
|
||||
def update_options(self, new_options: MappingProxyType[str, Any]) -> bool:
|
||||
def update_options(self, new_options: Mapping[str, Any]) -> bool:
|
||||
"""Update router options."""
|
||||
req_reload = False
|
||||
for name, new_opt in new_options.items():
|
||||
|
||||
@@ -28,5 +28,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/august",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pubnub", "yalexs"],
|
||||
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.7"]
|
||||
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.6.0"]
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_MODE,
|
||||
ATTR_NAME,
|
||||
CONF_ACTIONS,
|
||||
CONF_ALIAS,
|
||||
CONF_CONDITIONS,
|
||||
CONF_DEVICE_ID,
|
||||
@@ -27,6 +28,7 @@ from homeassistant.const import (
|
||||
CONF_MODE,
|
||||
CONF_PATH,
|
||||
CONF_PLATFORM,
|
||||
CONF_TRIGGERS,
|
||||
CONF_VARIABLES,
|
||||
CONF_ZONE,
|
||||
EVENT_HOMEASSISTANT_STARTED,
|
||||
@@ -86,11 +88,9 @@ from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .config import AutomationConfig, ValidationStatus
|
||||
from .const import (
|
||||
CONF_ACTIONS,
|
||||
CONF_INITIAL_STATE,
|
||||
CONF_TRACE,
|
||||
CONF_TRIGGER_VARIABLES,
|
||||
CONF_TRIGGERS,
|
||||
DEFAULT_INITIAL_STATE,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
|
||||
@@ -14,11 +14,15 @@ from homeassistant.components import blueprint
|
||||
from homeassistant.components.trace import TRACE_CONFIG_SCHEMA
|
||||
from homeassistant.config import config_per_platform, config_without_domain
|
||||
from homeassistant.const import (
|
||||
CONF_ACTION,
|
||||
CONF_ACTIONS,
|
||||
CONF_ALIAS,
|
||||
CONF_CONDITION,
|
||||
CONF_CONDITIONS,
|
||||
CONF_DESCRIPTION,
|
||||
CONF_ID,
|
||||
CONF_TRIGGER,
|
||||
CONF_TRIGGERS,
|
||||
CONF_VARIABLES,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -30,14 +34,10 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.yaml.input import UndefinedSubstitution
|
||||
|
||||
from .const import (
|
||||
CONF_ACTION,
|
||||
CONF_ACTIONS,
|
||||
CONF_HIDE_ENTITY,
|
||||
CONF_INITIAL_STATE,
|
||||
CONF_TRACE,
|
||||
CONF_TRIGGER,
|
||||
CONF_TRIGGER_VARIABLES,
|
||||
CONF_TRIGGERS,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
)
|
||||
@@ -58,34 +58,9 @@ _MINIMAL_PLATFORM_SCHEMA = vol.Schema(
|
||||
def _backward_compat_schema(value: Any | None) -> Any:
|
||||
"""Backward compatibility for automations."""
|
||||
|
||||
if not isinstance(value, dict):
|
||||
return value
|
||||
|
||||
# `trigger` has been renamed to `triggers`
|
||||
if CONF_TRIGGER in value:
|
||||
if CONF_TRIGGERS in value:
|
||||
raise vol.Invalid(
|
||||
"Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only."
|
||||
)
|
||||
value[CONF_TRIGGERS] = value.pop(CONF_TRIGGER)
|
||||
|
||||
# `condition` has been renamed to `conditions`
|
||||
if CONF_CONDITION in value:
|
||||
if CONF_CONDITIONS in value:
|
||||
raise vol.Invalid(
|
||||
"Cannot specify both 'condition' and 'conditions'. Please use 'conditions' only."
|
||||
)
|
||||
value[CONF_CONDITIONS] = value.pop(CONF_CONDITION)
|
||||
|
||||
# `action` has been renamed to `actions`
|
||||
if CONF_ACTION in value:
|
||||
if CONF_ACTIONS in value:
|
||||
raise vol.Invalid(
|
||||
"Cannot specify both 'action' and 'actions'. Please use 'actions' only."
|
||||
)
|
||||
value[CONF_ACTIONS] = value.pop(CONF_ACTION)
|
||||
|
||||
return value
|
||||
value = cv.renamed(CONF_TRIGGER, CONF_TRIGGERS)(value)
|
||||
value = cv.renamed(CONF_ACTION, CONF_ACTIONS)(value)
|
||||
return cv.renamed(CONF_CONDITION, CONF_CONDITIONS)(value)
|
||||
|
||||
|
||||
PLATFORM_SCHEMA = vol.All(
|
||||
|
||||
@@ -2,10 +2,6 @@
|
||||
|
||||
import logging
|
||||
|
||||
CONF_ACTION = "action"
|
||||
CONF_ACTIONS = "actions"
|
||||
CONF_TRIGGER = "trigger"
|
||||
CONF_TRIGGERS = "triggers"
|
||||
CONF_TRIGGER_VARIABLES = "trigger_variables"
|
||||
DOMAIN = "automation"
|
||||
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiobotocore", "botocore"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"]
|
||||
"requirements": ["aiobotocore==2.21.1", "botocore==1.37.1"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from ipaddress import ip_address
|
||||
from types import MappingProxyType
|
||||
from typing import Any
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
@@ -88,7 +87,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
api = await get_axis_api(self.hass, MappingProxyType(user_input))
|
||||
api = await get_axis_api(self.hass, user_input)
|
||||
|
||||
except AuthenticationRequired:
|
||||
errors["base"] = "invalid_auth"
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Axis network device abstraction."""
|
||||
|
||||
from asyncio import timeout
|
||||
from types import MappingProxyType
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
import axis
|
||||
@@ -23,7 +23,7 @@ from ..errors import AuthenticationRequired, CannotConnect
|
||||
|
||||
async def get_axis_api(
|
||||
hass: HomeAssistant,
|
||||
config: MappingProxyType[str, Any],
|
||||
config: Mapping[str, Any],
|
||||
) -> axis.AxisDevice:
|
||||
"""Create a Axis device API."""
|
||||
session = get_async_client(hass, verify_ssl=False)
|
||||
|
||||
@@ -3,11 +3,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Callable, Mapping
|
||||
from datetime import datetime
|
||||
import json
|
||||
import logging
|
||||
from types import MappingProxyType
|
||||
from typing import Any
|
||||
|
||||
from azure.eventhub import EventData, EventDataBatch
|
||||
@@ -179,7 +178,7 @@ class AzureEventHub:
|
||||
await self.async_send(None)
|
||||
await self._queue.join()
|
||||
|
||||
def update_options(self, new_options: MappingProxyType[str, Any]) -> None:
|
||||
def update_options(self, new_options: Mapping[str, Any]) -> None:
|
||||
"""Update options."""
|
||||
self._send_interval = new_options[CONF_SEND_INTERVAL]
|
||||
|
||||
|
||||
@@ -175,7 +175,8 @@ class AzureStorageBackupAgent(BackupAgent):
|
||||
"""Find a blob by backup id."""
|
||||
async for blob in self._client.list_blobs(include="metadata"):
|
||||
if (
|
||||
backup_id == blob.metadata.get("backup_id", "")
|
||||
blob.metadata is not None
|
||||
and backup_id == blob.metadata.get("backup_id", "")
|
||||
and blob.metadata.get("metadata_version") == METADATA_VERSION
|
||||
):
|
||||
return blob
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass, field, replace
|
||||
import datetime as dt
|
||||
from datetime import datetime, timedelta
|
||||
@@ -87,12 +88,26 @@ class BackupConfigData:
|
||||
else:
|
||||
time = None
|
||||
days = [Day(day) for day in data["schedule"]["days"]]
|
||||
agents = {}
|
||||
for agent_id, agent_data in data["agents"].items():
|
||||
protected = agent_data["protected"]
|
||||
stored_retention = agent_data["retention"]
|
||||
agent_retention: AgentRetentionConfig | None
|
||||
if stored_retention:
|
||||
agent_retention = AgentRetentionConfig(
|
||||
copies=stored_retention["copies"],
|
||||
days=stored_retention["days"],
|
||||
)
|
||||
else:
|
||||
agent_retention = None
|
||||
agent_config = AgentConfig(
|
||||
protected=protected,
|
||||
retention=agent_retention,
|
||||
)
|
||||
agents[agent_id] = agent_config
|
||||
|
||||
return cls(
|
||||
agents={
|
||||
agent_id: AgentConfig(protected=agent_data["protected"])
|
||||
for agent_id, agent_data in data["agents"].items()
|
||||
},
|
||||
agents=agents,
|
||||
automatic_backups_configured=data["automatic_backups_configured"],
|
||||
create_backup=CreateBackupConfig(
|
||||
agent_ids=data["create_backup"]["agent_ids"],
|
||||
@@ -176,12 +191,36 @@ class BackupConfig:
|
||||
"""Update config."""
|
||||
if agents is not UNDEFINED:
|
||||
for agent_id, agent_config in agents.items():
|
||||
if agent_id not in self.data.agents:
|
||||
self.data.agents[agent_id] = AgentConfig(**agent_config)
|
||||
agent_retention = agent_config.get("retention")
|
||||
if agent_retention is None:
|
||||
new_agent_retention = None
|
||||
else:
|
||||
self.data.agents[agent_id] = replace(
|
||||
self.data.agents[agent_id], **agent_config
|
||||
new_agent_retention = AgentRetentionConfig(
|
||||
copies=agent_retention.get("copies"),
|
||||
days=agent_retention.get("days"),
|
||||
)
|
||||
if agent_id not in self.data.agents:
|
||||
old_agent_retention = None
|
||||
self.data.agents[agent_id] = AgentConfig(
|
||||
protected=agent_config.get("protected", False),
|
||||
retention=new_agent_retention,
|
||||
)
|
||||
else:
|
||||
new_agent_config = self.data.agents[agent_id]
|
||||
old_agent_retention = new_agent_config.retention
|
||||
if "protected" in agent_config:
|
||||
new_agent_config = replace(
|
||||
new_agent_config, protected=agent_config["protected"]
|
||||
)
|
||||
if "retention" in agent_config:
|
||||
new_agent_config = replace(
|
||||
new_agent_config, retention=new_agent_retention
|
||||
)
|
||||
self.data.agents[agent_id] = new_agent_config
|
||||
if new_agent_retention != old_agent_retention:
|
||||
# There's a single retention application method
|
||||
# for both global and agent retention settings.
|
||||
self.data.retention.apply(self._manager)
|
||||
if automatic_backups_configured is not UNDEFINED:
|
||||
self.data.automatic_backups_configured = automatic_backups_configured
|
||||
if create_backup is not UNDEFINED:
|
||||
@@ -207,11 +246,24 @@ class AgentConfig:
|
||||
"""Represent the config for an agent."""
|
||||
|
||||
protected: bool
|
||||
"""Agent protected configuration.
|
||||
|
||||
If True, the agent backups are password protected.
|
||||
"""
|
||||
retention: AgentRetentionConfig | None = None
|
||||
"""Agent retention configuration.
|
||||
|
||||
If None, the global retention configuration is used.
|
||||
If not None, the global retention configuration is ignored for this agent.
|
||||
If an agent retention configuration is set and both copies and days are None,
|
||||
backups will be kept forever for that agent.
|
||||
"""
|
||||
|
||||
def to_dict(self) -> StoredAgentConfig:
|
||||
"""Convert agent config to a dict."""
|
||||
return {
|
||||
"protected": self.protected,
|
||||
"retention": self.retention.to_dict() if self.retention else None,
|
||||
}
|
||||
|
||||
|
||||
@@ -219,24 +271,46 @@ class StoredAgentConfig(TypedDict):
|
||||
"""Represent the stored config for an agent."""
|
||||
|
||||
protected: bool
|
||||
retention: StoredRetentionConfig | None
|
||||
|
||||
|
||||
class AgentParametersDict(TypedDict, total=False):
|
||||
"""Represent the parameters for an agent."""
|
||||
|
||||
protected: bool
|
||||
retention: RetentionParametersDict | None
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class RetentionConfig:
|
||||
"""Represent the backup retention configuration."""
|
||||
class BaseRetentionConfig:
|
||||
"""Represent the base backup retention configuration."""
|
||||
|
||||
copies: int | None = None
|
||||
days: int | None = None
|
||||
|
||||
def to_dict(self) -> StoredRetentionConfig:
|
||||
"""Convert backup retention configuration to a dict."""
|
||||
return StoredRetentionConfig(
|
||||
copies=self.copies,
|
||||
days=self.days,
|
||||
)
|
||||
|
||||
|
||||
@dataclass(kw_only=True)
|
||||
class RetentionConfig(BaseRetentionConfig):
|
||||
"""Represent the backup retention configuration."""
|
||||
|
||||
def apply(self, manager: BackupManager) -> None:
|
||||
"""Apply backup retention configuration."""
|
||||
if self.days is not None:
|
||||
agents_retention = {
|
||||
agent_id: agent_config.retention
|
||||
for agent_id, agent_config in manager.config.data.agents.items()
|
||||
}
|
||||
|
||||
if self.days is not None or any(
|
||||
agent_retention and agent_retention.days is not None
|
||||
for agent_retention in agents_retention.values()
|
||||
):
|
||||
LOGGER.debug(
|
||||
"Scheduling next automatic delete of backups older than %s in 1 day",
|
||||
self.days,
|
||||
@@ -246,13 +320,6 @@ class RetentionConfig:
|
||||
LOGGER.debug("Unscheduling next automatic delete")
|
||||
self._unschedule_next(manager)
|
||||
|
||||
def to_dict(self) -> StoredRetentionConfig:
|
||||
"""Convert backup retention configuration to a dict."""
|
||||
return StoredRetentionConfig(
|
||||
copies=self.copies,
|
||||
days=self.days,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _schedule_next(
|
||||
self,
|
||||
@@ -271,16 +338,81 @@ class RetentionConfig:
|
||||
"""Return backups older than days to delete."""
|
||||
# we need to check here since we await before
|
||||
# this filter is applied
|
||||
if self.days is None:
|
||||
return {}
|
||||
now = dt_util.utcnow()
|
||||
return {
|
||||
backup_id: backup
|
||||
for backup_id, backup in backups.items()
|
||||
if dt_util.parse_datetime(backup.date, raise_on_error=True)
|
||||
+ timedelta(days=self.days)
|
||||
< now
|
||||
agents_retention = {
|
||||
agent_id: agent_config.retention
|
||||
for agent_id, agent_config in manager.config.data.agents.items()
|
||||
}
|
||||
has_agents_retention = any(
|
||||
agent_retention for agent_retention in agents_retention.values()
|
||||
)
|
||||
has_agents_retention_days = any(
|
||||
agent_retention and agent_retention.days is not None
|
||||
for agent_retention in agents_retention.values()
|
||||
)
|
||||
if (global_days := self.days) is None and not has_agents_retention_days:
|
||||
# No global retention days and no agent retention days
|
||||
return {}
|
||||
|
||||
now = dt_util.utcnow()
|
||||
if global_days is not None and not has_agents_retention:
|
||||
# Return early to avoid the longer filtering below.
|
||||
return {
|
||||
backup_id: backup
|
||||
for backup_id, backup in backups.items()
|
||||
if dt_util.parse_datetime(backup.date, raise_on_error=True)
|
||||
+ timedelta(days=global_days)
|
||||
< now
|
||||
}
|
||||
|
||||
# If there are any agent retention settings, we need to check
|
||||
# the retention settings, for every backup and agent combination.
|
||||
|
||||
backups_to_delete = {}
|
||||
|
||||
for backup_id, backup in backups.items():
|
||||
backup_date = dt_util.parse_datetime(
|
||||
backup.date, raise_on_error=True
|
||||
)
|
||||
delete_from_agents = set(backup.agents)
|
||||
for agent_id in backup.agents:
|
||||
agent_retention = agents_retention.get(agent_id)
|
||||
if agent_retention is None:
|
||||
# This agent does not have a retention setting,
|
||||
# so the global retention setting should be used.
|
||||
if global_days is None:
|
||||
# This agent does not have a retention setting
|
||||
# and the global retention days setting is None,
|
||||
# so this backup should not be deleted.
|
||||
delete_from_agents.discard(agent_id)
|
||||
continue
|
||||
days = global_days
|
||||
elif (agent_days := agent_retention.days) is None:
|
||||
# This agent has a retention setting
|
||||
# where days is set to None,
|
||||
# so the backup should not be deleted.
|
||||
delete_from_agents.discard(agent_id)
|
||||
continue
|
||||
else:
|
||||
# This agent has a retention setting
|
||||
# where days is set to a number,
|
||||
# so that setting should be used.
|
||||
days = agent_days
|
||||
if backup_date + timedelta(days=days) >= now:
|
||||
# This backup is not older than the retention days,
|
||||
# so this agent should not be deleted.
|
||||
delete_from_agents.discard(agent_id)
|
||||
|
||||
filtered_backup = replace(
|
||||
backup,
|
||||
agents={
|
||||
agent_id: agent_backup_status
|
||||
for agent_id, agent_backup_status in backup.agents.items()
|
||||
if agent_id in delete_from_agents
|
||||
},
|
||||
)
|
||||
backups_to_delete[backup_id] = filtered_backup
|
||||
|
||||
return backups_to_delete
|
||||
|
||||
await manager.async_delete_filtered_backups(
|
||||
include_filter=_automatic_backups_filter, delete_filter=_delete_filter
|
||||
@@ -312,6 +444,10 @@ class RetentionParametersDict(TypedDict, total=False):
|
||||
days: int | None
|
||||
|
||||
|
||||
class AgentRetentionConfig(BaseRetentionConfig):
|
||||
"""Represent an agent retention configuration."""
|
||||
|
||||
|
||||
class StoredBackupSchedule(TypedDict):
|
||||
"""Represent the stored backup schedule configuration."""
|
||||
|
||||
@@ -554,16 +690,87 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N
|
||||
backups: dict[str, ManagerBackup],
|
||||
) -> dict[str, ManagerBackup]:
|
||||
"""Return oldest backups more numerous than copies to delete."""
|
||||
agents_retention = {
|
||||
agent_id: agent_config.retention
|
||||
for agent_id, agent_config in manager.config.data.agents.items()
|
||||
}
|
||||
has_agents_retention = any(
|
||||
agent_retention for agent_retention in agents_retention.values()
|
||||
)
|
||||
has_agents_retention_copies = any(
|
||||
agent_retention and agent_retention.copies is not None
|
||||
for agent_retention in agents_retention.values()
|
||||
)
|
||||
# we need to check here since we await before
|
||||
# this filter is applied
|
||||
if manager.config.data.retention.copies is None:
|
||||
if (
|
||||
global_copies := manager.config.data.retention.copies
|
||||
) is None and not has_agents_retention_copies:
|
||||
# No global retention copies and no agent retention copies
|
||||
return {}
|
||||
return dict(
|
||||
sorted(
|
||||
backups.items(),
|
||||
key=lambda backup_item: backup_item[1].date,
|
||||
)[: max(len(backups) - manager.config.data.retention.copies, 0)]
|
||||
if global_copies is not None and not has_agents_retention:
|
||||
# Return early to avoid the longer filtering below.
|
||||
return dict(
|
||||
sorted(
|
||||
backups.items(),
|
||||
key=lambda backup_item: backup_item[1].date,
|
||||
)[: max(len(backups) - global_copies, 0)]
|
||||
)
|
||||
|
||||
backups_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(dict)
|
||||
for backup_id, backup in backups.items():
|
||||
for agent_id in backup.agents:
|
||||
backups_by_agent[agent_id][backup_id] = backup
|
||||
|
||||
backups_to_delete_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(
|
||||
dict
|
||||
)
|
||||
for agent_id, agent_backups in backups_by_agent.items():
|
||||
agent_retention = agents_retention.get(agent_id)
|
||||
if agent_retention is None:
|
||||
# This agent does not have a retention setting,
|
||||
# so the global retention setting should be used.
|
||||
if global_copies is None:
|
||||
# This agent does not have a retention setting
|
||||
# and the global retention copies setting is None,
|
||||
# so backups should not be deleted.
|
||||
continue
|
||||
# The global retention setting will be used.
|
||||
copies = global_copies
|
||||
elif (agent_copies := agent_retention.copies) is None:
|
||||
# This agent has a retention setting
|
||||
# where copies is set to None,
|
||||
# so backups should not be deleted.
|
||||
continue
|
||||
else:
|
||||
# This agent retention setting will be used.
|
||||
copies = agent_copies
|
||||
|
||||
backups_to_delete_by_agent[agent_id] = dict(
|
||||
sorted(
|
||||
agent_backups.items(),
|
||||
key=lambda backup_item: backup_item[1].date,
|
||||
)[: max(len(agent_backups) - copies, 0)]
|
||||
)
|
||||
|
||||
backup_ids_to_delete: dict[str, set[str]] = defaultdict(set)
|
||||
for agent_id, to_delete in backups_to_delete_by_agent.items():
|
||||
for backup_id in to_delete:
|
||||
backup_ids_to_delete[backup_id].add(agent_id)
|
||||
backups_to_delete: dict[str, ManagerBackup] = {}
|
||||
for backup_id, agent_ids in backup_ids_to_delete.items():
|
||||
backup = backups[backup_id]
|
||||
# filter the backup to only include the agents that should be deleted
|
||||
filtered_backup = replace(
|
||||
backup,
|
||||
agents={
|
||||
agent_id: agent_backup_status
|
||||
for agent_id, agent_backup_status in backup.agents.items()
|
||||
if agent_id in agent_ids
|
||||
},
|
||||
)
|
||||
backups_to_delete[backup_id] = filtered_backup
|
||||
return backups_to_delete
|
||||
|
||||
await manager.async_delete_filtered_backups(
|
||||
include_filter=_automatic_backups_filter, delete_filter=_delete_filter
|
||||
|
||||
@@ -16,8 +16,8 @@ DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
|
||||
LOGGER = getLogger(__package__)
|
||||
|
||||
EXCLUDE_FROM_BACKUP = [
|
||||
"__pycache__/*",
|
||||
".DS_Store",
|
||||
"**/__pycache__/*",
|
||||
"**/.DS_Store",
|
||||
".HA_RESTORE",
|
||||
"*.db-shm",
|
||||
"*.log.*",
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
"""Diagnostics support for Home Assistant Backup integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_PASSWORD
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import BackupConfigEntry
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: BackupConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
return {
|
||||
"backup_agents": [
|
||||
{"name": agent.name, "agent_id": agent.agent_id}
|
||||
for agent in coordinator.backup_manager.backup_agents.values()
|
||||
],
|
||||
"backup_config": async_redact_data(
|
||||
coordinator.backup_manager.config.data.to_dict(), [CONF_PASSWORD]
|
||||
),
|
||||
}
|
||||
@@ -1726,7 +1726,9 @@ class CoreBackupReaderWriter(BackupReaderWriter):
|
||||
"""Filter to filter excludes."""
|
||||
|
||||
for exclude in excludes:
|
||||
if not path.match(exclude):
|
||||
# The home assistant core configuration directory is added as "data"
|
||||
# in the tar file, so we need to prefix that path to the filters.
|
||||
if not path.full_match(f"data/{exclude}"):
|
||||
continue
|
||||
LOGGER.debug("Ignoring %s because of %s", path, exclude)
|
||||
return True
|
||||
|
||||
@@ -0,0 +1,136 @@
|
||||
"""Backup onboarding views."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from functools import wraps
|
||||
from http import HTTPStatus
|
||||
from typing import TYPE_CHECKING, Any, Concatenate
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp.web_exceptions import HTTPUnauthorized
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.http import KEY_HASS
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.components.onboarding import (
|
||||
BaseOnboardingView,
|
||||
NoAuthBaseOnboardingView,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager
|
||||
|
||||
from . import BackupManager, Folder, IncorrectPasswordError, http as backup_http
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.components.onboarding import OnboardingStoreData
|
||||
|
||||
|
||||
async def async_setup_views(hass: HomeAssistant, data: OnboardingStoreData) -> None:
|
||||
"""Set up the backup views."""
|
||||
|
||||
hass.http.register_view(BackupInfoView(data))
|
||||
hass.http.register_view(RestoreBackupView(data))
|
||||
hass.http.register_view(UploadBackupView(data))
|
||||
|
||||
|
||||
def with_backup_manager[_ViewT: BaseOnboardingView, **_P](
|
||||
func: Callable[
|
||||
Concatenate[_ViewT, BackupManager, web.Request, _P],
|
||||
Coroutine[Any, Any, web.Response],
|
||||
],
|
||||
) -> Callable[Concatenate[_ViewT, web.Request, _P], Coroutine[Any, Any, web.Response]]:
|
||||
"""Home Assistant API decorator to check onboarding and inject manager."""
|
||||
|
||||
@wraps(func)
|
||||
async def with_backup(
|
||||
self: _ViewT,
|
||||
request: web.Request,
|
||||
*args: _P.args,
|
||||
**kwargs: _P.kwargs,
|
||||
) -> web.Response:
|
||||
"""Check admin and call function."""
|
||||
if self._data["done"]:
|
||||
raise HTTPUnauthorized
|
||||
|
||||
manager = await async_get_backup_manager(request.app[KEY_HASS])
|
||||
return await func(self, manager, request, *args, **kwargs)
|
||||
|
||||
return with_backup
|
||||
|
||||
|
||||
class BackupInfoView(NoAuthBaseOnboardingView):
|
||||
"""Get backup info view."""
|
||||
|
||||
url = "/api/onboarding/backup/info"
|
||||
name = "api:onboarding:backup:info"
|
||||
|
||||
@with_backup_manager
|
||||
async def get(self, manager: BackupManager, request: web.Request) -> web.Response:
|
||||
"""Return backup info."""
|
||||
backups, _ = await manager.async_get_backups()
|
||||
return self.json(
|
||||
{
|
||||
"backups": list(backups.values()),
|
||||
"state": manager.state,
|
||||
"last_action_event": manager.last_action_event,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class RestoreBackupView(NoAuthBaseOnboardingView):
|
||||
"""Restore backup view."""
|
||||
|
||||
url = "/api/onboarding/backup/restore"
|
||||
name = "api:onboarding:backup:restore"
|
||||
|
||||
@RequestDataValidator(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required("backup_id"): str,
|
||||
vol.Required("agent_id"): str,
|
||||
vol.Optional("password"): str,
|
||||
vol.Optional("restore_addons"): [str],
|
||||
vol.Optional("restore_database", default=True): bool,
|
||||
vol.Optional("restore_folders"): [vol.Coerce(Folder)],
|
||||
}
|
||||
)
|
||||
)
|
||||
@with_backup_manager
|
||||
async def post(
|
||||
self, manager: BackupManager, request: web.Request, data: dict[str, Any]
|
||||
) -> web.Response:
|
||||
"""Restore a backup."""
|
||||
try:
|
||||
await manager.async_restore_backup(
|
||||
data["backup_id"],
|
||||
agent_id=data["agent_id"],
|
||||
password=data.get("password"),
|
||||
restore_addons=data.get("restore_addons"),
|
||||
restore_database=data["restore_database"],
|
||||
restore_folders=data.get("restore_folders"),
|
||||
restore_homeassistant=True,
|
||||
)
|
||||
except IncorrectPasswordError:
|
||||
return self.json(
|
||||
{"code": "incorrect_password"}, status_code=HTTPStatus.BAD_REQUEST
|
||||
)
|
||||
except HomeAssistantError as err:
|
||||
return self.json(
|
||||
{"code": "restore_failed", "message": str(err)},
|
||||
status_code=HTTPStatus.BAD_REQUEST,
|
||||
)
|
||||
return web.Response(status=HTTPStatus.OK)
|
||||
|
||||
|
||||
class UploadBackupView(NoAuthBaseOnboardingView, backup_http.UploadBackupView):
|
||||
"""Upload backup view."""
|
||||
|
||||
url = "/api/onboarding/backup/upload"
|
||||
name = "api:onboarding:backup:upload"
|
||||
|
||||
@with_backup_manager
|
||||
async def post(self, manager: BackupManager, request: web.Request) -> web.Response:
|
||||
"""Upload a backup file."""
|
||||
return await self._post(request)
|
||||
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
|
||||
STORE_DELAY_SAVE = 30
|
||||
STORAGE_KEY = DOMAIN
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_VERSION_MINOR = 5
|
||||
STORAGE_VERSION_MINOR = 6
|
||||
|
||||
|
||||
class StoredBackupData(TypedDict):
|
||||
@@ -72,6 +72,10 @@ class _BackupStore(Store[StoredBackupData]):
|
||||
data["config"]["automatic_backups_configured"] = (
|
||||
data["config"]["create_backup"]["password"] is not None
|
||||
)
|
||||
if old_minor_version < 6:
|
||||
# Version 1.6 adds agent retention settings
|
||||
for agent in data["config"]["agents"]:
|
||||
data["config"]["agents"][agent]["retention"] = None
|
||||
|
||||
# Note: We allow reading data with major version 2.
|
||||
# Reject if major version is higher than 2.
|
||||
|
||||
@@ -26,9 +26,9 @@
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"backup_manager_state": {
|
||||
"name": "Backup Manager State",
|
||||
"name": "Backup Manager state",
|
||||
"state": {
|
||||
"idle": "Idle",
|
||||
"idle": "[%key:common::state::idle%]",
|
||||
"create_backup": "Creating a backup",
|
||||
"receive_backup": "Receiving a backup",
|
||||
"restore_backup": "Restoring a backup"
|
||||
|
||||
@@ -346,7 +346,28 @@ async def handle_config_info(
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/config/update",
|
||||
vol.Optional("agents"): vol.Schema({str: {"protected": bool}}),
|
||||
vol.Optional("agents"): vol.Schema(
|
||||
{
|
||||
str: {
|
||||
vol.Optional("protected"): bool,
|
||||
vol.Optional("retention"): vol.Any(
|
||||
vol.Schema(
|
||||
{
|
||||
# Note: We can't use cv.positive_int because it allows 0 even
|
||||
# though 0 is not positive.
|
||||
vol.Optional("copies"): vol.Any(
|
||||
vol.All(int, vol.Range(min=1)), None
|
||||
),
|
||||
vol.Optional("days"): vol.Any(
|
||||
vol.All(int, vol.Range(min=1)), None
|
||||
),
|
||||
},
|
||||
),
|
||||
None,
|
||||
),
|
||||
}
|
||||
}
|
||||
),
|
||||
vol.Optional("automatic_backups_configured"): bool,
|
||||
vol.Optional("create_backup"): vol.Schema(
|
||||
{
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
"state_attributes": {
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"auto": "[%key:component::climate::entity_component::_::state_attributes::fan_mode::state::auto%]"
|
||||
"auto": "[%key:common::state::auto%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
"""Balay virtual integration."""
|
||||
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"domain": "balay",
|
||||
"name": "Balay",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "home_connect"
|
||||
}
|
||||
@@ -103,8 +103,8 @@
|
||||
"temperature_range": {
|
||||
"name": "Temperature range",
|
||||
"state": {
|
||||
"low": "Low",
|
||||
"high": "High"
|
||||
"low": "[%key:common::state::low%]",
|
||||
"high": "[%key:common::state::high%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -124,15 +124,15 @@
|
||||
"battery": {
|
||||
"name": "Battery",
|
||||
"state": {
|
||||
"off": "Normal",
|
||||
"on": "Low"
|
||||
"off": "[%key:common::state::normal%]",
|
||||
"on": "[%key:common::state::low%]"
|
||||
}
|
||||
},
|
||||
"battery_charging": {
|
||||
"name": "Charging",
|
||||
"state": {
|
||||
"off": "Not charging",
|
||||
"on": "Charging"
|
||||
"on": "[%key:common::state::charging%]"
|
||||
}
|
||||
},
|
||||
"carbon_monoxide": {
|
||||
@@ -145,7 +145,7 @@
|
||||
"cold": {
|
||||
"name": "Cold",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
|
||||
"off": "[%key:common::state::normal%]",
|
||||
"on": "Cold"
|
||||
}
|
||||
},
|
||||
@@ -180,7 +180,7 @@
|
||||
"heat": {
|
||||
"name": "Heat",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
|
||||
"off": "[%key:common::state::normal%]",
|
||||
"on": "Hot"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -30,18 +30,18 @@
|
||||
"available": "Available",
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"unavailable": "Unavailable",
|
||||
"error": "Error",
|
||||
"error": "[%key:common::state::error%]",
|
||||
"offline": "Offline"
|
||||
}
|
||||
},
|
||||
"vehicle_status": {
|
||||
"name": "Vehicle status",
|
||||
"state": {
|
||||
"standby": "Standby",
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"vehicle_detected": "Detected",
|
||||
"ready": "Ready",
|
||||
"no_power": "No power",
|
||||
"vehicle_error": "Error"
|
||||
"vehicle_error": "[%key:common::state::error%]"
|
||||
}
|
||||
},
|
||||
"actual_v1": {
|
||||
|
||||
@@ -12,5 +12,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/bluemaestro",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bluemaestro-ble==0.2.3"]
|
||||
"requirements": ["bluemaestro-ble==0.4.0"]
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/bluesound",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["pyblu==2.0.0"],
|
||||
"requirements": ["pyblu==2.0.1"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_musc._tcp.local."
|
||||
|
||||
@@ -330,7 +330,12 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
|
||||
if self._status.input_id is not None:
|
||||
for input_ in self._inputs:
|
||||
if input_.id == self._status.input_id:
|
||||
# the input might not have an id => also try to match on the stream_url/url
|
||||
# we have to use both because neither matches all the time
|
||||
if (
|
||||
input_.id == self._status.input_id
|
||||
or input_.url == self._status.stream_url
|
||||
):
|
||||
return input_.text
|
||||
|
||||
for preset in self._presets:
|
||||
@@ -501,18 +506,16 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
return
|
||||
|
||||
# presets and inputs might have the same name; presets have priority
|
||||
url: str | None = None
|
||||
for input_ in self._inputs:
|
||||
if input_.text == source:
|
||||
url = input_.url
|
||||
await self._player.play_url(input_.url)
|
||||
return
|
||||
for preset in self._presets:
|
||||
if preset.name == source:
|
||||
url = preset.url
|
||||
await self._player.load_preset(preset.id)
|
||||
return
|
||||
|
||||
if url is None:
|
||||
raise ServiceValidationError(f"Source {source} not found")
|
||||
|
||||
await self._player.play_url(url)
|
||||
raise ServiceValidationError(f"Source {source} not found")
|
||||
|
||||
async def async_clear_playlist(self) -> None:
|
||||
"""Clear players playlist."""
|
||||
|
||||
@@ -19,8 +19,8 @@
|
||||
"bleak-retry-connector==3.9.0",
|
||||
"bluetooth-adapters==0.21.4",
|
||||
"bluetooth-auto-recovery==1.4.5",
|
||||
"bluetooth-data-tools==1.26.1",
|
||||
"bluetooth-data-tools==1.28.1",
|
||||
"dbus-fast==2.43.0",
|
||||
"habluetooth==3.37.0"
|
||||
"habluetooth==3.45.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -374,6 +374,27 @@ class PassiveBluetoothProcessorCoordinator[_DataT](BasePassiveBluetoothCoordinat
|
||||
self.logger.exception("Unexpected error updating %s data", self.name)
|
||||
return
|
||||
|
||||
self._process_update(update, was_available)
|
||||
|
||||
@callback
|
||||
def async_set_updated_data(self, update: _DataT) -> None:
|
||||
"""Manually update the processor with new data.
|
||||
|
||||
If the data comes in via a different method, like a
|
||||
notification, this method can be used to update the
|
||||
processor with the new data.
|
||||
|
||||
This is useful for devices that retrieve
|
||||
some of their data via notifications.
|
||||
"""
|
||||
was_available = self._available
|
||||
self._available = True
|
||||
self._process_update(update, was_available)
|
||||
|
||||
def _process_update(
|
||||
self, update: _DataT, was_available: bool | None = None
|
||||
) -> None:
|
||||
"""Process the update from the bluetooth device."""
|
||||
if not self.last_update_success:
|
||||
self.last_update_success = True
|
||||
self.logger.info("Coordinator %s recovered", self.name)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from bluetooth_adapters import (
|
||||
from habluetooth import (
|
||||
DiscoveredDeviceAdvertisementData,
|
||||
DiscoveredDeviceAdvertisementDataDict,
|
||||
DiscoveryStorageType,
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"data": {
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"region": "ConnectedDrive Region"
|
||||
"region": "ConnectedDrive region"
|
||||
},
|
||||
"data_description": {
|
||||
"username": "The email address of your MyBMW/MINI Connected account.",
|
||||
@@ -113,10 +113,10 @@
|
||||
},
|
||||
"select": {
|
||||
"ac_limit": {
|
||||
"name": "AC Charging Limit"
|
||||
"name": "AC charging limit"
|
||||
},
|
||||
"charging_mode": {
|
||||
"name": "Charging Mode",
|
||||
"name": "Charging mode",
|
||||
"state": {
|
||||
"immediate_charging": "Immediate charging",
|
||||
"delayed_charging": "Delayed charging",
|
||||
@@ -139,7 +139,7 @@
|
||||
"state": {
|
||||
"default": "Default",
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"error": "Error",
|
||||
"error": "[%key:common::state::error%]",
|
||||
"complete": "Complete",
|
||||
"fully_charged": "Fully charged",
|
||||
"finished_fully_charged": "Finished, fully charged",
|
||||
@@ -181,7 +181,7 @@
|
||||
"cooling": "Cooling",
|
||||
"heating": "Heating",
|
||||
"inactive": "Inactive",
|
||||
"standby": "Standby",
|
||||
"standby": "[%key:common::state::standby%]",
|
||||
"ventilation": "Ventilation"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -16,6 +16,7 @@ from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import DOMAIN

@@ -91,11 +92,22 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):

        self._discovered[CONF_ACCESS_TOKEN] = token
        try:
            _, hub_name = await _validate_input(self.hass, self._discovered)
            bond_id, hub_name = await _validate_input(self.hass, self._discovered)
        except InputValidationError:
            return
        await self.async_set_unique_id(bond_id)
        self._abort_if_unique_id_configured(updates={CONF_HOST: host})
        self._discovered[CONF_NAME] = hub_name

    async def async_step_dhcp(
        self, discovery_info: DhcpServiceInfo
    ) -> ConfigFlowResult:
        """Handle a flow initialized by dhcp discovery."""
        host = discovery_info.ip
        bond_id = discovery_info.hostname.partition("-")[2].upper()
        await self.async_set_unique_id(bond_id)
        return await self.async_step_any_discovery(bond_id, host)

    async def async_step_zeroconf(
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:

@@ -104,11 +116,17 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
        host: str = discovery_info.host
        bond_id = name.partition(".")[0]
        await self.async_set_unique_id(bond_id)
        return await self.async_step_any_discovery(bond_id, host)

    async def async_step_any_discovery(
        self, bond_id: str, host: str
    ) -> ConfigFlowResult:
        """Handle a flow initialized by discovery."""
        for entry in self._async_current_entries():
            if entry.unique_id != bond_id:
                continue
            updates = {CONF_HOST: host}
            if entry.state == ConfigEntryState.SETUP_ERROR and (
            if entry.state is ConfigEntryState.SETUP_ERROR and (
                token := await async_get_token(self.hass, host)
            ):
                updates[CONF_ACCESS_TOKEN] = token

@@ -153,10 +171,14 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
                CONF_HOST: self._discovered[CONF_HOST],
            }
            try:
                _, hub_name = await _validate_input(self.hass, data)
                bond_id, hub_name = await _validate_input(self.hass, data)
            except InputValidationError as error:
                errors["base"] = error.base
            else:
                await self.async_set_unique_id(bond_id)
                self._abort_if_unique_id_configured(
                    updates={CONF_HOST: self._discovered[CONF_HOST]}
                )
                return self.async_create_entry(
                    title=hub_name,
                    data=data,

@@ -185,8 +207,10 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
            except InputValidationError as error:
                errors["base"] = error.base
            else:
                await self.async_set_unique_id(bond_id)
                self._abort_if_unique_id_configured()
                await self.async_set_unique_id(bond_id, raise_on_progress=False)
                self._abort_if_unique_id_configured(
                    updates={CONF_HOST: user_input[CONF_HOST]}
                )
                return self.async_create_entry(title=hub_name, data=user_input)

        return self.async_show_form(
@@ -3,6 +3,16 @@
  "name": "Bond",
  "codeowners": ["@bdraco", "@prystupa", "@joshs85", "@marciogranzotto"],
  "config_flow": true,
  "dhcp": [
    {
      "hostname": "bond-*",
      "macaddress": "3C6A2C1*"
    },
    {
      "hostname": "bond-*",
      "macaddress": "F44E38*"
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/bond",
  "iot_class": "local_push",
  "loggers": ["bond_async"],
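Note: the DHCP matchers above hand hostnames matching "bond-*" to async_step_dhcp, which derives the Bond ID with str.partition. A minimal sketch of that parsing, using a hypothetical hostname that is not taken from this diff:

# The hostname below is a made-up example matching the "bond-*" pattern.
hostname = "bond-kvpr1234"
bond_id = hostname.partition("-")[2].upper()  # keep the text after the first "-"
assert bond_id == "KVPR1234"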
@@ -0,0 +1,67 @@
"""The Bosch Alarm integration."""

from __future__ import annotations

from ssl import SSLError

from bosch_alarm_mode2 import Panel

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr

from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN

PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL, Platform.SENSOR]

type BoschAlarmConfigEntry = ConfigEntry[Panel]


async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
    """Set up Bosch Alarm from a config entry."""

    panel = Panel(
        host=entry.data[CONF_HOST],
        port=entry.data[CONF_PORT],
        automation_code=entry.data.get(CONF_PASSWORD),
        installer_or_user_code=entry.data.get(
            CONF_INSTALLER_CODE, entry.data.get(CONF_USER_CODE)
        ),
    )
    try:
        await panel.connect()
    except (PermissionError, ValueError) as err:
        await panel.disconnect()
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN, translation_key="authentication_failed"
        ) from err
    except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err:
        await panel.disconnect()
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
        ) from err

    entry.runtime_data = panel

    device_registry = dr.async_get(hass)

    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, entry.unique_id or entry.entry_id)},
        name=f"Bosch {panel.model}",
        manufacturer="Bosch Security Systems",
        model=panel.model,
        sw_version=panel.firmware_version,
    )
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool:
    """Unload a config entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        await entry.runtime_data.disconnect()
    return unload_ok
@@ -0,0 +1,80 @@
"""Support for Bosch Alarm Panel."""

from __future__ import annotations

from bosch_alarm_mode2 import Panel

from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import BoschAlarmConfigEntry
from .entity import BoschAlarmAreaEntity


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: BoschAlarmConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up control panels for each area."""
    panel = config_entry.runtime_data

    async_add_entities(
        AreaAlarmControlPanel(
            panel,
            area_id,
            config_entry.unique_id or config_entry.entry_id,
        )
        for area_id in panel.areas
    )


class AreaAlarmControlPanel(BoschAlarmAreaEntity, AlarmControlPanelEntity):
    """An alarm control panel entity for a bosch alarm panel."""

    _attr_has_entity_name = True
    _attr_supported_features = (
        AlarmControlPanelEntityFeature.ARM_HOME
        | AlarmControlPanelEntityFeature.ARM_AWAY
    )
    _attr_code_arm_required = False
    _attr_name = None

    def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None:
        """Initialise a Bosch Alarm control panel entity."""
        super().__init__(panel, area_id, unique_id, False, False, True)
        self._attr_unique_id = self._area_unique_id

    @property
    def alarm_state(self) -> AlarmControlPanelState | None:
        """Return the state of the alarm."""
        if self._area.is_triggered():
            return AlarmControlPanelState.TRIGGERED
        if self._area.is_disarmed():
            return AlarmControlPanelState.DISARMED
        if self._area.is_arming():
            return AlarmControlPanelState.ARMING
        if self._area.is_pending():
            return AlarmControlPanelState.PENDING
        if self._area.is_part_armed():
            return AlarmControlPanelState.ARMED_HOME
        if self._area.is_all_armed():
            return AlarmControlPanelState.ARMED_AWAY
        return None

    async def async_alarm_disarm(self, code: str | None = None) -> None:
        """Disarm this panel."""
        await self.panel.area_disarm(self._area_id)

    async def async_alarm_arm_home(self, code: str | None = None) -> None:
        """Send arm home command."""
        await self.panel.area_arm_part(self._area_id)

    async def async_alarm_arm_away(self, code: str | None = None) -> None:
        """Send arm away command."""
        await self.panel.area_arm_all(self._area_id)
@@ -0,0 +1,248 @@
"""Config flow for Bosch Alarm integration."""

from __future__ import annotations

import asyncio
from collections.abc import Mapping
import logging
import ssl
from typing import Any

from bosch_alarm_mode2 import Panel
import voluptuous as vol

from homeassistant.config_entries import (
    SOURCE_RECONFIGURE,
    SOURCE_USER,
    ConfigFlow,
    ConfigFlowResult,
)
from homeassistant.const import (
    CONF_CODE,
    CONF_HOST,
    CONF_MODEL,
    CONF_PASSWORD,
    CONF_PORT,
)
import homeassistant.helpers.config_validation as cv

from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_PORT, default=7700): cv.positive_int,
    }
)

STEP_AUTH_DATA_SCHEMA_SOLUTION = vol.Schema(
    {
        vol.Required(CONF_USER_CODE): str,
    }
)

STEP_AUTH_DATA_SCHEMA_AMAX = vol.Schema(
    {
        vol.Required(CONF_INSTALLER_CODE): str,
        vol.Required(CONF_PASSWORD): str,
    }
)

STEP_AUTH_DATA_SCHEMA_BG = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    }
)

STEP_INIT_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_CODE): str})


async def try_connect(
    data: dict[str, Any], load_selector: int = 0
) -> tuple[str, int | None]:
    """Validate the user input allows us to connect.

    Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
    """
    panel = Panel(
        host=data[CONF_HOST],
        port=data[CONF_PORT],
        automation_code=data.get(CONF_PASSWORD),
        installer_or_user_code=data.get(CONF_INSTALLER_CODE, data.get(CONF_USER_CODE)),
    )

    try:
        await panel.connect(load_selector)
    finally:
        await panel.disconnect()

    return (panel.model, panel.serial_number)


class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Bosch Alarm."""

    def __init__(self) -> None:
        """Init config flow."""

        self._data: dict[str, Any] = {}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}

        if user_input is not None:
            try:
                # Use load_selector = 0 to fetch the panel model without authentication.
                (model, serial) = await try_connect(user_input, 0)
            except (
                OSError,
                ConnectionRefusedError,
                ssl.SSLError,
                asyncio.exceptions.TimeoutError,
            ) as e:
                _LOGGER.error("Connection Error: %s", e)
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                self._data = user_input
                self._data[CONF_MODEL] = model

                if self.source == SOURCE_RECONFIGURE:
                    if (
                        self._get_reconfigure_entry().data[CONF_MODEL]
                        != self._data[CONF_MODEL]
                    ):
                        return self.async_abort(reason="device_mismatch")
                return await self.async_step_auth()
        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input
            ),
            errors=errors,
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the reconfigure step."""
        return await self.async_step_user()

    async def async_step_auth(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the auth step."""
        errors: dict[str, str] = {}

        # Each model variant requires a different authentication flow
        if "Solution" in self._data[CONF_MODEL]:
            schema = STEP_AUTH_DATA_SCHEMA_SOLUTION
        elif "AMAX" in self._data[CONF_MODEL]:
            schema = STEP_AUTH_DATA_SCHEMA_AMAX
        else:
            schema = STEP_AUTH_DATA_SCHEMA_BG

        if user_input is not None:
            self._data.update(user_input)
            try:
                (model, serial_number) = await try_connect(
                    self._data, Panel.LOAD_EXTENDED_INFO
                )
            except (PermissionError, ValueError) as e:
                errors["base"] = "invalid_auth"
                _LOGGER.error("Authentication Error: %s", e)
            except (
                OSError,
                ConnectionRefusedError,
                ssl.SSLError,
                TimeoutError,
            ) as e:
                _LOGGER.error("Connection Error: %s", e)
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                if serial_number:
                    await self.async_set_unique_id(str(serial_number))
                if self.source == SOURCE_USER:
                    if serial_number:
                        self._abort_if_unique_id_configured()
                    else:
                        self._async_abort_entries_match(
                            {CONF_HOST: self._data[CONF_HOST]}
                        )
                    return self.async_create_entry(
                        title=f"Bosch {model}", data=self._data
                    )
                if serial_number:
                    self._abort_if_unique_id_mismatch(reason="device_mismatch")
                return self.async_update_reload_and_abort(
                    self._get_reconfigure_entry(),
                    data=self._data,
                )

        return self.async_show_form(
            step_id="auth",
            data_schema=self.add_suggested_values_to_schema(schema, user_input),
            errors=errors,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth upon an authentication error."""
        self._data = dict(entry_data)
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the reauth step."""
        errors: dict[str, str] = {}

        # Each model variant requires a different authentication flow
        if "Solution" in self._data[CONF_MODEL]:
            schema = STEP_AUTH_DATA_SCHEMA_SOLUTION
        elif "AMAX" in self._data[CONF_MODEL]:
            schema = STEP_AUTH_DATA_SCHEMA_AMAX
        else:
            schema = STEP_AUTH_DATA_SCHEMA_BG

        if user_input is not None:
            reauth_entry = self._get_reauth_entry()
            self._data.update(user_input)
            try:
                (_, _) = await try_connect(self._data, Panel.LOAD_EXTENDED_INFO)
            except (PermissionError, ValueError) as e:
                errors["base"] = "invalid_auth"
                _LOGGER.error("Authentication Error: %s", e)
            except (
                OSError,
                ConnectionRefusedError,
                ssl.SSLError,
                TimeoutError,
            ) as e:
                _LOGGER.error("Connection Error: %s", e)
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    data_updates=user_input,
                )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=self.add_suggested_values_to_schema(schema, user_input),
            errors=errors,
        )
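Note: the try_connect helper above can also be exercised on its own. A minimal sketch of a standalone probe, assuming the module above is importable and using hypothetical host, port, and user-code values that are not taken from this diff:

import asyncio

async def probe_panel() -> None:
    # Hypothetical connection details for illustration only.
    model, serial = await try_connect(
        {CONF_HOST: "192.0.2.10", CONF_PORT: 7700, CONF_USER_CODE: "2580"},
        Panel.LOAD_EXTENDED_INFO,
    )
    _LOGGER.info("Connected to %s (serial %s)", model, serial)

asyncio.run(probe_panel())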
@@ -0,0 +1,6 @@
"""Constants for the Bosch Alarm integration."""

DOMAIN = "bosch_alarm"
HISTORY_ATTR = "history"
CONF_INSTALLER_CODE = "installer_code"
CONF_USER_CODE = "user_code"
@@ -0,0 +1,73 @@
"""Diagnostics for bosch alarm."""

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant

from . import BoschAlarmConfigEntry
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE

TO_REDACT = [CONF_INSTALLER_CODE, CONF_USER_CODE, CONF_PASSWORD]


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: BoschAlarmConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""

    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT),
        "data": {
            "model": entry.runtime_data.model,
            "serial_number": entry.runtime_data.serial_number,
            "protocol_version": entry.runtime_data.protocol_version,
            "firmware_version": entry.runtime_data.firmware_version,
            "areas": [
                {
                    "id": area_id,
                    "name": area.name,
                    "all_ready": area.all_ready,
                    "part_ready": area.part_ready,
                    "faults": area.faults,
                    "alarms": area.alarms,
                    "disarmed": area.is_disarmed(),
                    "arming": area.is_arming(),
                    "pending": area.is_pending(),
                    "part_armed": area.is_part_armed(),
                    "all_armed": area.is_all_armed(),
                    "armed": area.is_armed(),
                    "triggered": area.is_triggered(),
                }
                for area_id, area in entry.runtime_data.areas.items()
            ],
            "points": [
                {
                    "id": point_id,
                    "name": point.name,
                    "open": point.is_open(),
                    "normal": point.is_normal(),
                }
                for point_id, point in entry.runtime_data.points.items()
            ],
            "doors": [
                {
                    "id": door_id,
                    "name": door.name,
                    "open": door.is_open(),
                    "locked": door.is_locked(),
                }
                for door_id, door in entry.runtime_data.doors.items()
            ],
            "outputs": [
                {
                    "id": output_id,
                    "name": output.name,
                    "active": output.is_active(),
                }
                for output_id, output in entry.runtime_data.outputs.items()
            ],
            "history_events": entry.runtime_data.events,
        },
    }