forked from home-assistant/core

Compare commits (1 commit)

Commit 20a1c046c9
.github/ISSUE_TEMPLATE/bug_report.yml

@@ -1,6 +1,5 @@
 name: Report an issue with Home Assistant Core
 description: Report an issue with Home Assistant Core.
-type: Bug
 body:
   - type: markdown
     attributes:
.github/workflows/builder.yml

@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -116,7 +116,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: translations

@@ -324,7 +324,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.8.2
+        uses: sigstore/cosign-installer@v3.8.1
         with:
           cosign-release: "v2.2.3"

@@ -457,12 +457,12 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: translations

@@ -509,7 +509,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build Docker image
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
         id: push
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
+        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}

.github/workflows/ci.yaml (+23 -23)
@@ -249,7 +249,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -294,7 +294,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -334,7 +334,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -374,7 +374,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
@@ -484,7 +484,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -587,7 +587,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -620,7 +620,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -653,7 +653,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Dependency review
-        uses: actions/dependency-review-action@v4.6.0
+        uses: actions/dependency-review-action@v4.5.0
         with:
           license-check: false # We use our own license audit checks

@@ -677,7 +677,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -720,7 +720,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -767,7 +767,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -812,7 +812,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -889,7 +889,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -949,7 +949,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -968,7 +968,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: pytest_buckets
       - name: Compile English translations
@@ -1074,7 +1074,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -1208,7 +1208,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -1312,12 +1312,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.4.2
+        uses: codecov/codecov-action@v5.4.0
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1359,7 +1359,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true
@@ -1454,12 +1454,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.4.2
+        uses: codecov/codecov-action@v5.4.0
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -1479,7 +1479,7 @@ jobs:
     timeout-minutes: 10
     steps:
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: test-results-*
       - name: Upload test results to Codecov

.github/workflows/codeql.yml

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.16
+        uses: github/codeql-action/init@v3.28.13
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.16
+        uses: github/codeql-action/analyze@v3.28.13
         with:
           category: "/language:python"

.github/workflows/translations.yml

@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

.github/workflows/wheels.yml

@@ -36,7 +36,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_diff

@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_all_wheels

.strict-typing

@@ -291,7 +291,6 @@ homeassistant.components.kaleidescape.*
 homeassistant.components.knocki.*
 homeassistant.components.knx.*
-homeassistant.components.kraken.*
 homeassistant.components.kulersky.*
 homeassistant.components.lacrosse.*
 homeassistant.components.lacrosse_view.*
 homeassistant.components.lamarzocco.*
@@ -363,10 +362,8 @@ homeassistant.components.no_ip.*
 homeassistant.components.nordpool.*
 homeassistant.components.notify.*
 homeassistant.components.notion.*
-homeassistant.components.ntfy.*
 homeassistant.components.number.*
 homeassistant.components.nut.*
 homeassistant.components.ohme.*
-homeassistant.components.onboarding.*
 homeassistant.components.oncue.*
 homeassistant.components.onedrive.*
@@ -386,7 +383,6 @@ homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
 homeassistant.components.peblar.*
-homeassistant.components.peco.*
 homeassistant.components.pegel_online.*
 homeassistant.components.persistent_notification.*
 homeassistant.components.person.*
 homeassistant.components.pi_hole.*
@@ -463,7 +459,6 @@ homeassistant.components.slack.*
 homeassistant.components.sleepiq.*
-homeassistant.components.smhi.*
 homeassistant.components.smlight.*
 homeassistant.components.smtp.*
 homeassistant.components.snooz.*
 homeassistant.components.solarlog.*
 homeassistant.components.sonarr.*
CODEOWNERS (Generated, +7 -15)
@@ -432,7 +432,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/entur_public_transport/ @hfurubotten
 /homeassistant/components/environment_canada/ @gwww @michaeldavie
 /tests/components/environment_canada/ @gwww @michaeldavie
-/homeassistant/components/ephember/ @ttroy50 @roberty99
+/homeassistant/components/ephember/ @ttroy50
 /homeassistant/components/epic_games_store/ @hacf-fr @Quentame
 /tests/components/epic_games_store/ @hacf-fr @Quentame
 /homeassistant/components/epion/ @lhgravendeel
@@ -704,8 +704,6 @@ build.json @home-assistant/supervisor
 /tests/components/image_upload/ @home-assistant/core
 /homeassistant/components/imap/ @jbouwh
 /tests/components/imap/ @jbouwh
-/homeassistant/components/imeon_inverter/ @Imeon-Energy
-/tests/components/imeon_inverter/ @Imeon-Energy
 /homeassistant/components/imgw_pib/ @bieniu
 /tests/components/imgw_pib/ @bieniu
 /homeassistant/components/improv_ble/ @emontnemery
@@ -937,8 +935,6 @@ build.json @home-assistant/supervisor
 /tests/components/metoffice/ @MrHarcombe @avee87
 /homeassistant/components/microbees/ @microBeesTech
 /tests/components/microbees/ @microBeesTech
-/homeassistant/components/miele/ @astrandb
-/tests/components/miele/ @astrandb
 /homeassistant/components/mikrotik/ @engrbm87
 /tests/components/mikrotik/ @engrbm87
 /homeassistant/components/mill/ @danielhiversen
@@ -1051,8 +1047,6 @@ build.json @home-assistant/supervisor
 /tests/components/nsw_fuel_station/ @nickw444
 /homeassistant/components/nsw_rural_fire_service_feed/ @exxamalte
 /tests/components/nsw_rural_fire_service_feed/ @exxamalte
-/homeassistant/components/ntfy/ @tr4nt0r
-/tests/components/ntfy/ @tr4nt0r
 /homeassistant/components/nuheat/ @tstabrawa
 /tests/components/nuheat/ @tstabrawa
 /homeassistant/components/nuki/ @pschmitt @pvizeli @pree
@@ -1081,6 +1075,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/ombi/ @larssont
 /homeassistant/components/onboarding/ @home-assistant/core
 /tests/components/onboarding/ @home-assistant/core
+/homeassistant/components/oncue/ @bdraco @peterager
+/tests/components/oncue/ @bdraco @peterager
 /homeassistant/components/ondilo_ico/ @JeromeHXP
 /tests/components/ondilo_ico/ @JeromeHXP
 /homeassistant/components/onedrive/ @zweckj
@@ -1258,8 +1254,6 @@ build.json @home-assistant/supervisor
 /tests/components/recovery_mode/ @home-assistant/core
 /homeassistant/components/refoss/ @ashionky
 /tests/components/refoss/ @ashionky
-/homeassistant/components/rehlko/ @bdraco @peterager
-/tests/components/rehlko/ @bdraco @peterager
 /homeassistant/components/remote/ @home-assistant/core
 /tests/components/remote/ @home-assistant/core
 /homeassistant/components/remote_calendar/ @Thomas55555
@@ -1318,8 +1312,6 @@ build.json @home-assistant/supervisor
 /tests/components/ruuvitag_ble/ @akx
 /homeassistant/components/rympro/ @OnFreund @elad-bar @maorcc
 /tests/components/rympro/ @OnFreund @elad-bar @maorcc
-/homeassistant/components/s3/ @tomasbedrich
-/tests/components/s3/ @tomasbedrich
 /homeassistant/components/sabnzbd/ @shaiu @jpbede
 /tests/components/sabnzbd/ @shaiu @jpbede
 /homeassistant/components/saj/ @fredericvl
@@ -1395,6 +1387,7 @@ build.json @home-assistant/supervisor
 /homeassistant/components/siren/ @home-assistant/core @raman325
 /tests/components/siren/ @home-assistant/core @raman325
+/homeassistant/components/sisyphus/ @jkeljo
 /homeassistant/components/sky_hub/ @rogerselwyn
 /homeassistant/components/sky_remote/ @dunnmj @saty9
 /tests/components/sky_remote/ @dunnmj @saty9
 /homeassistant/components/skybell/ @tkdrob
@@ -1441,8 +1434,8 @@ build.json @home-assistant/supervisor
 /tests/components/solarlog/ @Ernst79 @dontinelli
 /homeassistant/components/solax/ @squishykid @Darsstar
 /tests/components/solax/ @squishykid @Darsstar
-/homeassistant/components/soma/ @ratsept
-/tests/components/soma/ @ratsept
+/homeassistant/components/soma/ @ratsept @sebfortier2288
+/tests/components/soma/ @ratsept @sebfortier2288
 /homeassistant/components/sonarr/ @ctalkington
 /tests/components/sonarr/ @ctalkington
 /homeassistant/components/songpal/ @rytilahti @shenxn
@@ -1474,8 +1467,7 @@ build.json @home-assistant/supervisor
 /tests/components/steam_online/ @tkdrob
 /homeassistant/components/steamist/ @bdraco
 /tests/components/steamist/ @bdraco
-/homeassistant/components/stiebel_eltron/ @fucm @ThyMYthOS
-/tests/components/stiebel_eltron/ @fucm @ThyMYthOS
+/homeassistant/components/stiebel_eltron/ @fucm
 /homeassistant/components/stookwijzer/ @fwestenberg
 /tests/components/stookwijzer/ @fwestenberg
 /homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
Dockerfile (Generated, +1 -1)
@@ -31,7 +31,7 @@ RUN \
     && go2rtc --version

# Install uv
-RUN pip3 install uv==0.7.1
+RUN pip3 install uv==0.6.10

 WORKDIR /usr/src

homeassistant/bootstrap.py (+17 -12)
@@ -53,7 +53,6 @@ from .components import (
     logbook as logbook_pre_import,  # noqa: F401
     lovelace as lovelace_pre_import,  # noqa: F401
-    onboarding as onboarding_pre_import,  # noqa: F401
     person as person_pre_import,  # noqa: F401
     recorder as recorder_import,  # noqa: F401 - not named pre_import since it has requirements
     repairs as repairs_pre_import,  # noqa: F401
     search as search_pre_import,  # noqa: F401
@@ -860,14 +859,8 @@ async def _async_set_up_integrations(
     integrations, all_integrations = await _async_resolve_domains_and_preload(
         hass, config
     )
-    # Detect all cycles
-    integrations_after_dependencies = (
-        await loader.resolve_integrations_after_dependencies(
-            hass, all_integrations.values(), set(all_integrations)
-        )
-    )
-    all_domains = set(integrations_after_dependencies)
-    domains = set(integrations) & all_domains
+    all_domains = set(all_integrations)
+    domains = set(integrations)

     _LOGGER.info(
         "Domains to be set up: %s | %s",
@@ -875,8 +868,6 @@ async def _async_set_up_integrations(
         all_domains - domains,
     )

-    async_set_domains_to_be_loaded(hass, all_domains)
-
     # Initialize recorder
     if "recorder" in all_domains:
         recorder.async_initialize_recorder(hass)
@@ -909,12 +900,24 @@ async def _async_set_up_integrations(
         stage_dep_domains_unfiltered = {
             dep
             for domain in stage_domains
-            for dep in integrations_after_dependencies[domain]
+            for dep in all_integrations[domain].all_dependencies
             if dep not in stage_domains
         }
         stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components

         stage_all_domains = stage_domains | stage_dep_domains
+        stage_all_integrations = {
+            domain: all_integrations[domain] for domain in stage_all_domains
+        }
+        # Detect all cycles
+        stage_integrations_after_dependencies = (
+            await loader.resolve_integrations_after_dependencies(
+                hass, stage_all_integrations.values(), stage_all_domains
+            )
+        )
+        stage_all_domains = set(stage_integrations_after_dependencies)
+        stage_domains &= stage_all_domains
+        stage_dep_domains &= stage_all_domains

         _LOGGER.info(
             "Setting up stage %s: %s | %s\nDependencies: %s | %s",
@@ -925,6 +928,8 @@ async def _async_set_up_integrations(
             stage_dep_domains_unfiltered - stage_dep_domains,
         )

+        async_set_domains_to_be_loaded(hass, stage_all_domains)
+
         if timeout is None:
             await _async_setup_multi_components(hass, stage_all_domains, config)
             continue
homeassistant/brands/eve.json

@@ -1,5 +0,0 @@
-{
-  "domain": "eve",
-  "name": "Eve",
-  "iot_standards": ["matter"]
-}
homeassistant/brands/google.json

@@ -6,7 +6,6 @@
     "google_assistant_sdk",
     "google_cloud",
     "google_drive",
-    "google_gemini",
     "google_generative_ai_conversation",
     "google_mail",
     "google_maps",
homeassistant/brands/nuki.json

@@ -1,6 +0,0 @@
-{
-  "domain": "nuki",
-  "name": "Nuki",
-  "integrations": ["nuki"],
-  "iot_standards": ["matter"]
-}
homeassistant/components/accuweather/strings.json

@@ -72,10 +72,10 @@
         "level": {
           "name": "Level",
           "state": {
-            "high": "[%key:common::state::high%]",
-            "low": "[%key:common::state::low%]",
+            "high": "High",
+            "low": "Low",
             "moderate": "Moderate",
-            "very_high": "[%key:common::state::very_high%]"
+            "very_high": "Very high"
           }
         }
       }
@@ -89,10 +89,10 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "high": "[%key:common::state::high%]",
-            "low": "[%key:common::state::low%]",
+            "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+            "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-            "very_high": "[%key:common::state::very_high%]"
+            "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
           }
         }
       }
@@ -123,10 +123,10 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "high": "[%key:common::state::high%]",
-            "low": "[%key:common::state::low%]",
+            "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+            "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-            "very_high": "[%key:common::state::very_high%]"
+            "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
           }
         }
       }
@@ -167,10 +167,10 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
          "state": {
-            "high": "[%key:common::state::high%]",
-            "low": "[%key:common::state::low%]",
+            "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+            "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-            "very_high": "[%key:common::state::very_high%]"
+            "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
           }
         }
       }
@@ -181,10 +181,10 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "high": "[%key:common::state::high%]",
-            "low": "[%key:common::state::low%]",
+            "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+            "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-            "very_high": "[%key:common::state::very_high%]"
+            "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
           }
         }
       }
@@ -195,10 +195,10 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "high": "[%key:common::state::high%]",
-            "low": "[%key:common::state::low%]",
+            "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]",
+            "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
-            "very_high": "[%key:common::state::very_high%]"
+            "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]"
           }
         }
       }
homeassistant/components/adax/__init__.py

@@ -2,38 +2,25 @@

 from __future__ import annotations

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant

 from .const import CONNECTION_TYPE, LOCAL
-from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator

 PLATFORMS = [Platform.CLIMATE]


-async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Adax from a config entry."""
-    if entry.data.get(CONNECTION_TYPE) == LOCAL:
-        local_coordinator = AdaxLocalCoordinator(hass, entry)
-        entry.runtime_data = local_coordinator
-    else:
-        cloud_coordinator = AdaxCloudCoordinator(hass, entry)
-        entry.runtime_data = cloud_coordinator
-
-    await entry.runtime_data.async_config_entry_first_refresh()
-
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
     return True


-async def async_unload_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


-async def async_migrate_entry(
-    hass: HomeAssistant, config_entry: AdaxConfigEntry
-) -> bool:
+async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
     """Migrate old entry."""
     # convert title and unique_id to string
     if config_entry.version == 1:
homeassistant/components/adax/climate.py

@@ -12,42 +12,57 @@ from homeassistant.components.climate import (
     ClimateEntityFeature,
     HVACMode,
 )
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_TEMPERATURE,
     CONF_IP_ADDRESS,
     CONF_PASSWORD,
     CONF_TOKEN,
     CONF_UNIQUE_ID,
     PRECISION_WHOLE,
     UnitOfTemperature,
 )
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from . import AdaxConfigEntry
-from .const import CONNECTION_TYPE, DOMAIN, LOCAL
-from .coordinator import AdaxCloudCoordinator, AdaxLocalCoordinator
+from .const import ACCOUNT_ID, CONNECTION_TYPE, DOMAIN, LOCAL


 async def async_setup_entry(
     hass: HomeAssistant,
-    entry: AdaxConfigEntry,
+    entry: ConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up the Adax thermostat with config flow."""
     if entry.data.get(CONNECTION_TYPE) == LOCAL:
-        local_coordinator = cast(AdaxLocalCoordinator, entry.runtime_data)
-        async_add_entities(
-            [LocalAdaxDevice(local_coordinator, entry.data[CONF_UNIQUE_ID])],
-        )
-    else:
-        cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)
-        async_add_entities(
-            AdaxDevice(cloud_coordinator, device_id)
-            for device_id in cloud_coordinator.data
+        adax_data_handler = AdaxLocal(
+            entry.data[CONF_IP_ADDRESS],
+            entry.data[CONF_TOKEN],
+            websession=async_get_clientsession(hass, verify_ssl=False),
         )
+        async_add_entities(
+            [LocalAdaxDevice(adax_data_handler, entry.data[CONF_UNIQUE_ID])], True
+        )
+        return
+
+    adax_data_handler = Adax(
+        entry.data[ACCOUNT_ID],
+        entry.data[CONF_PASSWORD],
+        websession=async_get_clientsession(hass),
+    )
+
+    async_add_entities(
+        (
+            AdaxDevice(room, adax_data_handler)
+            for room in await adax_data_handler.get_rooms()
+        ),
+        True,
+    )


-class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
+class AdaxDevice(ClimateEntity):
     """Representation of a heater."""

     _attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
@@ -61,37 +76,20 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
     _attr_target_temperature_step = PRECISION_WHOLE
     _attr_temperature_unit = UnitOfTemperature.CELSIUS

-    def __init__(
-        self,
-        coordinator: AdaxCloudCoordinator,
-        device_id: str,
-    ) -> None:
+    def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None:
         """Initialize the heater."""
-        super().__init__(coordinator)
-        self._adax_data_handler: Adax = coordinator.adax_data_handler
-        self._device_id = device_id
+        self._device_id = heater_data["id"]
+        self._adax_data_handler = adax_data_handler

-        self._attr_name = self.room["name"]
-        self._attr_unique_id = f"{self.room['homeId']}_{self._device_id}"
+        self._attr_unique_id = f"{heater_data['homeId']}_{heater_data['id']}"
         self._attr_device_info = DeviceInfo(
-            identifiers={(DOMAIN, device_id)},
+            identifiers={(DOMAIN, heater_data["id"])},
             # Instead of setting the device name to the entity name, adax
             # should be updated to set has_entity_name = True, and set the entity
             # name to None
             name=cast(str | None, self.name),
             manufacturer="Adax",
         )
-        self._apply_data(self.room)
-
-    @property
-    def available(self) -> bool:
-        """Whether the entity is available or not."""
-        return super().available and self._device_id in self.coordinator.data
-
-    @property
-    def room(self) -> dict[str, Any]:
-        """Gets the data for this particular device."""
-        return self.coordinator.data[self._device_id]

     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
         """Set hvac mode."""
@@ -106,9 +104,7 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
             )
         else:
             return
-
-        # Request data refresh from source to verify that update was successful
-        await self.coordinator.async_request_refresh()
+        await self._adax_data_handler.update()

     async def async_set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperature."""
@@ -118,31 +114,28 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
             self._device_id, temperature, True
         )

-    @callback
-    def _handle_coordinator_update(self) -> None:
-        """Handle updated data from the coordinator."""
-        if room := self.room:
-            self._apply_data(room)
-        super()._handle_coordinator_update()
-
-    def _apply_data(self, room: dict[str, Any]) -> None:
-        """Update the appropriate attributues based on received data."""
-        self._attr_current_temperature = room.get("temperature")
-        self._attr_target_temperature = room.get("targetTemperature")
-        if room["heatingEnabled"]:
-            self._attr_hvac_mode = HVACMode.HEAT
-            self._attr_icon = "mdi:radiator"
-        else:
-            self._attr_hvac_mode = HVACMode.OFF
-            self._attr_icon = "mdi:radiator-off"
+    async def async_update(self) -> None:
+        """Get the latest data."""
+        for room in await self._adax_data_handler.get_rooms():
+            if room["id"] != self._device_id:
+                continue
+            self._attr_name = room["name"]
+            self._attr_current_temperature = room.get("temperature")
+            self._attr_target_temperature = room.get("targetTemperature")
+            if room["heatingEnabled"]:
+                self._attr_hvac_mode = HVACMode.HEAT
+                self._attr_icon = "mdi:radiator"
+            else:
+                self._attr_hvac_mode = HVACMode.OFF
+                self._attr_icon = "mdi:radiator-off"
+            return


-class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
+class LocalAdaxDevice(ClimateEntity):
     """Representation of a heater."""

     _attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
-    _attr_hvac_mode = HVACMode.OFF
-    _attr_icon = "mdi:radiator-off"
+    _attr_hvac_mode = HVACMode.HEAT
     _attr_max_temp = 35
     _attr_min_temp = 5
     _attr_supported_features = (
@@ -153,10 +146,9 @@ class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
     _attr_target_temperature_step = PRECISION_WHOLE
     _attr_temperature_unit = UnitOfTemperature.CELSIUS

-    def __init__(self, coordinator: AdaxLocalCoordinator, unique_id: str) -> None:
+    def __init__(self, adax_data_handler: AdaxLocal, unique_id: str) -> None:
         """Initialize the heater."""
-        super().__init__(coordinator)
-        self._adax_data_handler: AdaxLocal = coordinator.adax_data_handler
+        self._adax_data_handler = adax_data_handler
         self._attr_unique_id = unique_id
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, unique_id)},
@@ -177,20 +169,17 @@ class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
             return
         await self._adax_data_handler.set_target_temperature(temperature)

-    @callback
-    def _handle_coordinator_update(self) -> None:
-        """Handle updated data from the coordinator."""
-        if data := self.coordinator.data:
-            self._attr_current_temperature = data["current_temperature"]
-            self._attr_available = self._attr_current_temperature is not None
-            if (target_temp := data["target_temperature"]) == 0:
-                self._attr_hvac_mode = HVACMode.OFF
-                self._attr_icon = "mdi:radiator-off"
-                if target_temp == 0:
-                    self._attr_target_temperature = self._attr_min_temp
-            else:
-                self._attr_hvac_mode = HVACMode.HEAT
-                self._attr_icon = "mdi:radiator"
-                self._attr_target_temperature = target_temp
-
-        super()._handle_coordinator_update()
+    async def async_update(self) -> None:
+        """Get the latest data."""
+        data = await self._adax_data_handler.get_status()
+        self._attr_current_temperature = data["current_temperature"]
+        self._attr_available = self._attr_current_temperature is not None
+        if (target_temp := data["target_temperature"]) == 0:
+            self._attr_hvac_mode = HVACMode.OFF
+            self._attr_icon = "mdi:radiator-off"
+            if target_temp == 0:
+                self._attr_target_temperature = self._attr_min_temp
+        else:
+            self._attr_hvac_mode = HVACMode.HEAT
+            self._attr_icon = "mdi:radiator"
+            self._attr_target_temperature = target_temp
homeassistant/components/adax/const.py

@@ -1,6 +1,5 @@
 """Constants for the Adax integration."""

-import datetime
 from typing import Final

 ACCOUNT_ID: Final = "account_id"
@@ -10,5 +9,3 @@ DOMAIN: Final = "adax"
 LOCAL = "Local"
 WIFI_SSID = "wifi_ssid"
 WIFI_PSWD = "wifi_pswd"
-
-SCAN_INTERVAL = datetime.timedelta(seconds=60)
homeassistant/components/adax/coordinator.py

@@ -1,71 +0,0 @@
-"""DataUpdateCoordinator for the Adax component."""
-
-import logging
-from typing import Any, cast
-
-from adax import Adax
-from adax_local import Adax as AdaxLocal
-
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_TOKEN
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
-
-from .const import ACCOUNT_ID, SCAN_INTERVAL
-
-_LOGGER = logging.getLogger(__name__)
-
-
-type AdaxConfigEntry = ConfigEntry[AdaxCloudCoordinator | AdaxLocalCoordinator]
-
-
-class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
-    """Coordinator for updating data to and from Adax (cloud)."""
-
-    def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
-        """Initialize the Adax coordinator used for Cloud mode."""
-        super().__init__(
-            hass,
-            config_entry=entry,
-            logger=_LOGGER,
-            name="AdaxCloud",
-            update_interval=SCAN_INTERVAL,
-        )
-
-        self.adax_data_handler = Adax(
-            entry.data[ACCOUNT_ID],
-            entry.data[CONF_PASSWORD],
-            websession=async_get_clientsession(hass),
-        )
-
-    async def _async_update_data(self) -> dict[str, dict[str, Any]]:
-        """Fetch data from the Adax."""
-        rooms = await self.adax_data_handler.get_rooms() or []
-        return {r["id"]: r for r in rooms}
-
-
-class AdaxLocalCoordinator(DataUpdateCoordinator[dict[str, Any] | None]):
-    """Coordinator for updating data to and from Adax (local)."""
-
-    def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
-        """Initialize the Adax coordinator used for Local mode."""
-        super().__init__(
-            hass,
-            config_entry=entry,
-            logger=_LOGGER,
-            name="AdaxLocal",
-            update_interval=SCAN_INTERVAL,
-        )
-
-        self.adax_data_handler = AdaxLocal(
-            entry.data[CONF_IP_ADDRESS],
-            entry.data[CONF_TOKEN],
-            websession=async_get_clientsession(hass, verify_ssl=False),
-        )
-
-    async def _async_update_data(self) -> dict[str, Any]:
-        """Fetch data from the Adax."""
-        if result := await self.adax_data_handler.get_status():
-            return cast(dict[str, Any], result)
-        raise UpdateFailed("Got invalid status from device")
homeassistant/components/airvisual/strings.json

@@ -16,8 +16,8 @@
       "data": {
         "api_key": "[%key:common::config_flow::data::api_key%]",
         "city": "City",
-        "state": "State",
-        "country": "[%key:common::config_flow::data::country%]"
+        "country": "Country",
+        "state": "State"
       }
     },
     "reauth_confirm": {
@@ -56,12 +56,12 @@
     "sensor": {
       "pollutant_label": {
         "state": {
-          "co": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
-          "n2": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
-          "o3": "[%key:component::sensor::entity_component::ozone::name%]",
-          "p1": "[%key:component::sensor::entity_component::pm10::name%]",
-          "p2": "[%key:component::sensor::entity_component::pm25::name%]",
-          "s2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
+          "co": "Carbon monoxide",
+          "n2": "Nitrogen dioxide",
+          "o3": "Ozone",
+          "p1": "PM10",
+          "p2": "PM2.5",
+          "s2": "Sulfur dioxide"
         }
       },
       "pollutant_level": {
homeassistant/components/airzone/manifest.json

@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone",
   "iot_class": "local_polling",
   "loggers": ["aioairzone"],
-  "requirements": ["aioairzone==1.0.0"]
+  "requirements": ["aioairzone==0.9.9"]
 }
homeassistant/components/airzone/sensor.py

@@ -9,8 +9,6 @@ from aioairzone.const import (
     AZD_HUMIDITY,
     AZD_TEMP,
     AZD_TEMP_UNIT,
-    AZD_THERMOSTAT_BATTERY,
-    AZD_THERMOSTAT_SIGNAL,
     AZD_WEBSERVER,
     AZD_WIFI_RSSI,
     AZD_ZONES,
@@ -75,20 +73,6 @@ ZONE_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = (
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
-    SensorEntityDescription(
-        device_class=SensorDeviceClass.BATTERY,
-        key=AZD_THERMOSTAT_BATTERY,
-        native_unit_of_measurement=PERCENTAGE,
-        state_class=SensorStateClass.MEASUREMENT,
-    ),
-    SensorEntityDescription(
-        entity_category=EntityCategory.DIAGNOSTIC,
-        entity_registry_enabled_default=False,
-        key=AZD_THERMOSTAT_SIGNAL,
-        native_unit_of_measurement=PERCENTAGE,
-        state_class=SensorStateClass.MEASUREMENT,
-        translation_key="thermostat_signal",
-    ),
 )

homeassistant/components/airzone/strings.json

@@ -76,9 +76,6 @@
     "sensor": {
       "rssi": {
         "name": "RSSI"
-      },
-      "thermostat_signal": {
-        "name": "Signal strength"
       }
     }
   }
homeassistant/components/airzone_cloud/manifest.json

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
   "iot_class": "cloud_push",
   "loggers": ["aioairzone_cloud"],
-  "requirements": ["aioairzone-cloud==0.6.12"]
+  "requirements": ["aioairzone-cloud==0.6.11"]
 }
homeassistant/components/airzone_cloud/strings.json

@@ -34,7 +34,7 @@
       "state": {
         "off": "[%key:common::state::off%]",
         "on": "[%key:common::state::on%]",
-        "auto": "[%key:common::state::auto%]"
+        "auto": "Auto"
       }
     },
     "modes": {
homeassistant/components/alexa/capabilities.py

@@ -719,7 +719,7 @@ class LockCapabilities(AlexaEntity):
         yield Alexa(self.entity)


-@ENTITY_ADAPTERS.register(media_player.DOMAIN)
+@ENTITY_ADAPTERS.register(media_player.const.DOMAIN)
 class MediaPlayerCapabilities(AlexaEntity):
     """Class to represent MediaPlayer capabilities."""

@@ -757,7 +757,9 @@ class MediaPlayerCapabilities(AlexaEntity):

         if supported & media_player.MediaPlayerEntityFeature.SELECT_SOURCE:
             inputs = AlexaInputController.get_valid_inputs(
-                self.entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST, [])
+                self.entity.attributes.get(
+                    media_player.const.ATTR_INPUT_SOURCE_LIST, []
+                )
             )
             if len(inputs) > 0:
                 yield AlexaInputController(self.entity)
@@ -774,7 +776,8 @@ class MediaPlayerCapabilities(AlexaEntity):
             and domain != "denonavr"
         ):
             inputs = AlexaEqualizerController.get_valid_inputs(
-                self.entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST) or []
+                self.entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
+                or []
             )
             if len(inputs) > 0:
                 yield AlexaEqualizerController(self.entity)
homeassistant/components/alexa/handlers.py

@@ -566,7 +566,7 @@ async def async_api_set_volume(

     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
+        media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
     }

     await hass.services.async_call(
@@ -589,7 +589,7 @@ async def async_api_select_input(

     # Attempt to map the ALL UPPERCASE payload name to a source.
     # Strips trailing 1 to match single input devices.
-    source_list = entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or []
+    source_list = entity.attributes.get(media_player.const.ATTR_INPUT_SOURCE_LIST) or []
     for source in source_list:
         formatted_source = (
             source.lower().replace("-", "").replace("_", "").replace(" ", "")
@@ -611,7 +611,7 @@ async def async_api_select_input(

     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_INPUT_SOURCE: media_input,
+        media_player.const.ATTR_INPUT_SOURCE: media_input,
     }

     await hass.services.async_call(
@@ -636,7 +636,7 @@ async def async_api_adjust_volume(
     volume_delta = int(directive.payload["volume"])

     entity = directive.entity
-    current_level = entity.attributes[media_player.ATTR_MEDIA_VOLUME_LEVEL]
+    current_level = entity.attributes[media_player.const.ATTR_MEDIA_VOLUME_LEVEL]

     # read current state
     try:
@@ -648,7 +648,7 @@ async def async_api_adjust_volume(

     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
+        media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
     }

     await hass.services.async_call(
@@ -709,7 +709,7 @@ async def async_api_set_mute(
     entity = directive.entity
     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_MEDIA_VOLUME_MUTED: mute,
+        media_player.const.ATTR_MEDIA_VOLUME_MUTED: mute,
     }

     await hass.services.async_call(
@@ -1708,13 +1708,15 @@ async def async_api_changechannel(

     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_MEDIA_CONTENT_ID: channel,
-        media_player.ATTR_MEDIA_CONTENT_TYPE: (media_player.MediaType.CHANNEL),
+        media_player.const.ATTR_MEDIA_CONTENT_ID: channel,
+        media_player.const.ATTR_MEDIA_CONTENT_TYPE: (
+            media_player.const.MEDIA_TYPE_CHANNEL
+        ),
     }

     await hass.services.async_call(
         entity.domain,
-        media_player.SERVICE_PLAY_MEDIA,
+        media_player.const.SERVICE_PLAY_MEDIA,
         data,
         blocking=False,
         context=context,
@@ -1823,13 +1825,13 @@ async def async_api_set_eq_mode(
     context: ha.Context,
 ) -> AlexaResponse:
     """Process a SetMode request for EqualizerController."""
-    mode: str = directive.payload["mode"]
+    mode = directive.payload["mode"]
     entity = directive.entity
     data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}

-    sound_mode_list = entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST)
+    sound_mode_list = entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
     if sound_mode_list and mode.lower() in sound_mode_list:
-        data[media_player.ATTR_SOUND_MODE] = mode.lower()
+        data[media_player.const.ATTR_SOUND_MODE] = mode.lower()
     else:
         msg = f"failed to map sound mode {mode} to a mode on {entity.entity_id}"
         raise AlexaInvalidValueError(msg)
homeassistant/components/alexa/state_report.py

@@ -3,10 +3,10 @@
 from __future__ import annotations

 from asyncio import timeout
-from collections.abc import Mapping
 from http import HTTPStatus
 import json
 import logging
+from types import MappingProxyType
 from typing import TYPE_CHECKING, Any, cast
 from uuid import uuid4

@@ -260,10 +260,10 @@ async def async_enable_proactive_mode(
     def extra_significant_check(
         hass: HomeAssistant,
         old_state: str,
-        old_attrs: Mapping[Any, Any],
+        old_attrs: dict[Any, Any] | MappingProxyType[Any, Any],
         old_extra_arg: Any,
         new_state: str,
-        new_attrs: Mapping[Any, Any],
+        new_attrs: dict[str, Any] | MappingProxyType[Any, Any],
         new_extra_arg: Any,
     ) -> bool:
         """Check if the serialized data has changed."""
homeassistant/components/analytics_insights/strings.json

@@ -3,12 +3,12 @@
     "step": {
       "user": {
         "data": {
-          "tracked_addons": "Add-ons",
+          "tracked_addons": "Addons",
           "tracked_integrations": "Integrations",
           "tracked_custom_integrations": "Custom integrations"
         },
         "data_description": {
-          "tracked_addons": "Select the add-ons you want to track",
+          "tracked_addons": "Select the addons you want to track",
           "tracked_integrations": "Select the integrations you want to track",
           "tracked_custom_integrations": "Select the custom integrations you want to track"
         }
homeassistant/components/androidtv_remote/entity.py

@@ -73,7 +73,7 @@ class AndroidTVRemoteBaseEntity(Entity):
             self._api.send_key_command(key_code, direction)
         except ConnectionClosed as exc:
             raise HomeAssistantError(
-                translation_domain=DOMAIN, translation_key="connection_closed"
+                "Connection to Android TV device is closed"
             ) from exc

     def _send_launch_app_command(self, app_link: str) -> None:
@@ -85,5 +85,5 @@ class AndroidTVRemoteBaseEntity(Entity):
             self._api.send_launch_app_command(app_link)
         except ConnectionClosed as exc:
             raise HomeAssistantError(
-                translation_domain=DOMAIN, translation_key="connection_closed"
+                "Connection to Android TV device is closed"
             ) from exc
homeassistant/components/androidtv_remote/media_player.py

@@ -21,7 +21,7 @@ from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from . import AndroidTVRemoteConfigEntry
-from .const import CONF_APP_ICON, CONF_APP_NAME, DOMAIN
+from .const import CONF_APP_ICON, CONF_APP_NAME
 from .entity import AndroidTVRemoteBaseEntity

 PARALLEL_UPDATES = 0
@@ -233,5 +233,5 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
             await asyncio.sleep(delay_secs)
         except ConnectionClosed as exc:
             raise HomeAssistantError(
-                translation_domain=DOMAIN, translation_key="connection_closed"
+                "Connection to Android TV device is closed"
             ) from exc
homeassistant/components/androidtv_remote/strings.json

@@ -54,10 +54,5 @@
         }
       }
     }
-  },
-  "exceptions": {
-    "connection_closed": {
-      "message": "Connection to the Android TV device is closed"
-    }
-  }
+  }
 }
homeassistant/components/anthropic/config_flow.py

@@ -2,7 +2,6 @@

 from __future__ import annotations

-from collections.abc import Mapping
 from functools import partial
 import logging
 from types import MappingProxyType
@@ -53,7 +52,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(

 RECOMMENDED_OPTIONS = {
     CONF_RECOMMENDED: True,
-    CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
+    CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
     CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
 }

@@ -135,8 +134,9 @@ class AnthropicOptionsFlow(OptionsFlow):

         if user_input is not None:
             if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
-                if not user_input.get(CONF_LLM_HASS_API):
-                    user_input.pop(CONF_LLM_HASS_API, None)
+                if user_input[CONF_LLM_HASS_API] == "none":
+                    user_input.pop(CONF_LLM_HASS_API)

                 if user_input.get(
                     CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
                 ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
@@ -151,16 +151,12 @@ class AnthropicOptionsFlow(OptionsFlow):
             options = {
                 CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
                 CONF_PROMPT: user_input[CONF_PROMPT],
-                CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
+                CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
             }

         suggested_values = options.copy()
         if not suggested_values.get(CONF_PROMPT):
             suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
-        if (
-            suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
-        ) and isinstance(suggested_llm_apis, str):
-            suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]

         schema = self.add_suggested_values_to_schema(
             vol.Schema(anthropic_config_option_schema(self.hass, options)),
@@ -176,22 +172,28 @@ class AnthropicOptionsFlow(OptionsFlow):

 def anthropic_config_option_schema(
     hass: HomeAssistant,
-    options: Mapping[str, Any],
+    options: dict[str, Any] | MappingProxyType[str, Any],
 ) -> dict:
     """Return a schema for Anthropic completion options."""
     hass_apis: list[SelectOptionDict] = [
         SelectOptionDict(
+            label="No control",
+            value="none",
+        )
+    ]
+    hass_apis.extend(
+        SelectOptionDict(
             label=api.name,
             value=api.id,
         )
         for api in llm.async_get_apis(hass)
-    ]
+    )

     schema = {
         vol.Optional(CONF_PROMPT): TemplateSelector(),
-        vol.Optional(
-            CONF_LLM_HASS_API,
-        ): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
+        vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector(
+            SelectSelectorConfig(options=hass_apis)
+        ),
         vol.Required(
             CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
         ): bool,
homeassistant/components/anthropic/conversation.py

@@ -9,13 +9,11 @@ from anthropic import AsyncStream
 from anthropic._types import NOT_GIVEN
 from anthropic.types import (
     InputJSONDelta,
-    MessageDeltaUsage,
     MessageParam,
     MessageStreamEvent,
     RawContentBlockDeltaEvent,
     RawContentBlockStartEvent,
     RawContentBlockStopEvent,
-    RawMessageDeltaEvent,
     RawMessageStartEvent,
     RawMessageStopEvent,
     RedactedThinkingBlock,
@@ -33,7 +31,6 @@ from anthropic.types import (
     ToolResultBlockParam,
     ToolUseBlock,
     ToolUseBlockParam,
-    Usage,
 )
 from voluptuous_openapi import convert

@@ -165,8 +162,7 @@ def _convert_content(
     return messages


-async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
-    chat_log: conversation.ChatLog,
+async def _transform_stream(
     result: AsyncStream[MessageStreamEvent],
     messages: list[MessageParam],
 ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
@@ -211,7 +207,6 @@ async def _transform_stream(
         | None
     ) = None
     current_tool_args: str
-    input_usage: Usage | None = None

     async for response in result:
         LOGGER.debug("Received response: %s", response)
@@ -220,7 +215,6 @@ async def _transform_stream(
             if response.message.role != "assistant":
                 raise ValueError("Unexpected message role")
             current_message = MessageParam(role=response.message.role, content=[])
-            input_usage = response.message.usage
         elif isinstance(response, RawContentBlockStartEvent):
             if isinstance(response.content_block, ToolUseBlock):
                 current_block = ToolUseBlockParam(
@@ -271,54 +265,32 @@ async def _transform_stream(
             if current_block is None:
                 raise ValueError("Unexpected stop event without a current block")
             if current_block["type"] == "tool_use":
-                # tool block
-                tool_args = json.loads(current_tool_args) if current_tool_args else {}
-                current_block["input"] = tool_args
+                tool_block = cast(ToolUseBlockParam, current_block)
+                tool_args = json.loads(current_tool_args)
+                tool_block["input"] = tool_args
                 yield {
                     "tool_calls": [
                         llm.ToolInput(
-                            id=current_block["id"],
-                            tool_name=current_block["name"],
+                            id=tool_block["id"],
+                            tool_name=tool_block["name"],
                             tool_args=tool_args,
                         )
                     ]
                 }
             elif current_block["type"] == "thinking":
-                # thinking block
-                LOGGER.debug("Thinking: %s", current_block["thinking"])
+                thinking_block = cast(ThinkingBlockParam, current_block)
+                LOGGER.debug("Thinking: %s", thinking_block["thinking"])

             if current_message is None:
                 raise ValueError("Unexpected stop event without a current message")
             current_message["content"].append(current_block)  # type: ignore[union-attr]
             current_block = None
-        elif isinstance(response, RawMessageDeltaEvent):
-            if (usage := response.usage) is not None:
-                chat_log.async_trace(_create_token_stats(input_usage, usage))
         elif isinstance(response, RawMessageStopEvent):
             if current_message is not None:
                 messages.append(current_message)
                 current_message = None


-def _create_token_stats(
-    input_usage: Usage | None, response_usage: MessageDeltaUsage
-) -> dict[str, Any]:
-    """Create token stats for conversation agent tracing."""
-    input_tokens = 0
-    cached_input_tokens = 0
-    if input_usage:
-        input_tokens = input_usage.input_tokens
-        cached_input_tokens = input_usage.cache_creation_input_tokens or 0
-    output_tokens = response_usage.output_tokens
-    return {
-        "stats": {
-            "input_tokens": input_tokens,
-            "cached_input_tokens": cached_input_tokens,
-            "output_tokens": output_tokens,
-        }
-    }
-
-
 class AnthropicConversationEntity(
     conversation.ConversationEntity, conversation.AbstractConversationAgent
 ):
@@ -421,8 +393,7 @@ class AnthropicConversationEntity(
                 [
                     content
                     async for content in chat_log.async_add_delta_content_stream(
-                        user_input.agent_id,
-                        _transform_stream(chat_log, stream, messages),
+                        user_input.agent_id, _transform_stream(stream, messages)
                     )
                     if not isinstance(content, conversation.AssistantContent)
                 ]
@@ -53,8 +53,10 @@ class OnlineStatus(CoordinatorEntity[APCUPSdCoordinator], BinarySensorEntity):
        """Initialize the APCUPSd binary device."""
        super().__init__(coordinator, context=description.key.upper())

        # Set up unique id and device info if serial number is available.
        if (serial_no := coordinator.data.serial_no) is not None:
            self._attr_unique_id = f"{serial_no}_{description.key}"
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
        self._attr_device_info = coordinator.device_info

    @property

@@ -85,16 +85,11 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
        self._host = host
        self._port = port

    @property
    def unique_device_id(self) -> str:
        """Return a unique ID of the device, which is the serial number (if available) or the config entry ID."""
        return self.data.serial_no or self.config_entry.entry_id

    @property
    def device_info(self) -> DeviceInfo:
        """Return the DeviceInfo of this APC UPS, if serial number is available."""
        return DeviceInfo(
            identifiers={(DOMAIN, self.unique_device_id)},
            identifiers={(DOMAIN, self.data.serial_no or self.config_entry.entry_id)},
            model=self.data.model,
            manufacturer="APC",
            name=self.data.name or "APC UPS",

@@ -113,7 +108,4 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
            data = await aioapcaccess.request_status(self._host, self._port)
            return APCUPSdData(data)
        except (OSError, asyncio.IncompleteReadError) as error:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="cannot_connect",
            ) from error
            raise UpdateFailed(error) from error

@@ -458,8 +458,11 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity):
        """Initialize the sensor."""
        super().__init__(coordinator=coordinator, context=description.key.upper())

        # Set up unique id and device info if serial number is available.
        if (serial_no := coordinator.data.serial_no) is not None:
            self._attr_unique_id = f"{serial_no}_{description.key}"

        self.entity_description = description
        self._attr_unique_id = f"{coordinator.unique_device_id}_{description.key}"
        self._attr_device_info = coordinator.device_info

        # Initial update of attributes.

@@ -93,7 +93,7 @@
        "name": "Internal temperature"
      },
      "last_self_test": {
        "name": "Last self-test"
        "name": "Last self test"
      },
      "last_transfer": {
        "name": "Last transfer"

@@ -177,7 +177,7 @@
        "name": "Restore requirement"
      },
      "self_test_result": {
        "name": "Self-test result"
        "name": "Self test result"
      },
      "sensitivity": {
        "name": "Sensitivity"

@@ -195,7 +195,7 @@
        "name": "Status"
      },
      "self_test_interval": {
        "name": "Self-test interval"
        "name": "Self test interval"
      },
      "time_left": {
        "name": "Time left"

@@ -219,10 +219,5 @@
        "name": "Transfer to battery"
      }
    }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "Cannot connect to APC UPS Daemon."
    }
  }
}

@@ -20,7 +20,6 @@ import voluptuous as vol
from homeassistant.components import zeroconf
from homeassistant.config_entries import (
    SOURCE_IGNORE,
    SOURCE_REAUTH,
    SOURCE_ZEROCONF,
    ConfigEntry,
    ConfigFlow,

@@ -382,9 +381,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
                CONF_IDENTIFIERS: list(combined_identifiers),
            },
        )
        # Don't reload ignored entries or in the middle of reauth,
        # e.g. if the user is entering a new PIN
        if entry.source != SOURCE_IGNORE and self.source != SOURCE_REAUTH:
        if entry.source != SOURCE_IGNORE:
            self.hass.config_entries.async_schedule_reload(entry.entry_id)
        if not allow_exist:
            raise DeviceAlreadyConfigured

@@ -120,7 +120,6 @@ class AppleTvMediaPlayer(
        """Initialize the Apple TV media player."""
        super().__init__(name, identifier, manager)
        self._playing: Playing | None = None
        self._playing_last_updated: datetime | None = None
        self._app_list: dict[str, str] = {}

    @callback

@@ -210,7 +209,6 @@ class AppleTvMediaPlayer(
        This is a callback function from pyatv.interface.PushListener.
        """
        self._playing = playstatus
        self._playing_last_updated = dt_util.utcnow()
        self.async_write_ha_state()

    @callback

@@ -318,7 +316,7 @@ class AppleTvMediaPlayer(
    def media_position_updated_at(self) -> datetime | None:
        """Last valid time of media position."""
        if self.state in {MediaPlayerState.PLAYING, MediaPlayerState.PAUSED}:
            return self._playing_last_updated
        return dt_util.utcnow()
        return None

    async def async_play_media(

@@ -6,6 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/apsystems",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["APsystemsEZ1"],
  "requirements": ["apsystems-ez1==2.6.0"]
  "requirements": ["apsystems-ez1==2.5.0"]
}

@@ -21,7 +21,7 @@
  "entity": {
    "binary_sensor": {
      "off_grid_status": {
        "name": "Off-grid status"
        "name": "Off grid status"
      },
      "dc_1_short_circuit_error_status": {
        "name": "DC 1 short circuit error status"

@@ -36,9 +36,9 @@
      "wi_fi_strength": {
        "name": "Wi-Fi strength",
        "state": {
          "low": "[%key:common::state::low%]",
          "medium": "[%key:common::state::medium%]",
          "high": "[%key:common::state::high%]"
          "low": "Low",
          "medium": "Medium",
          "high": "High"
        }
      }
    }

@@ -26,7 +26,7 @@
    "sensor": {
      "threshold": {
        "state": {
          "error": "[%key:common::state::error%]",
          "error": "Error",
          "green": "Green",
          "yellow": "Yellow",
          "red": "Red"

@@ -60,8 +60,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
            {
                vol.Optional("message"): str,
                vol.Optional("media_id"): str,
                vol.Optional("preannounce"): bool,
                vol.Optional("preannounce_media_id"): str,
                vol.Optional("preannounce_media_id"): vol.Any(str, None),
            }
        ),
        cv.has_at_least_one_key("message", "media_id"),

@@ -76,8 +75,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
            {
                vol.Optional("start_message"): str,
                vol.Optional("start_media_id"): str,
                vol.Optional("preannounce"): bool,
                vol.Optional("preannounce_media_id"): str,
                vol.Optional("preannounce_media_id"): vol.Any(str, None),
                vol.Optional("extra_system_prompt"): str,
            }
        ),

@@ -180,8 +180,7 @@ class AssistSatelliteEntity(entity.Entity):
        self,
        message: str | None = None,
        media_id: str | None = None,
        preannounce: bool = True,
        preannounce_media_id: str = PREANNOUNCE_URL,
        preannounce_media_id: str | None = PREANNOUNCE_URL,
    ) -> None:
        """Play and show an announcement on the satellite.

@@ -191,8 +190,8 @@ class AssistSatelliteEntity(entity.Entity):
        If media_id is provided, it is played directly. It is possible
        to omit the message and the satellite will not show any text.

        If preannounce is True, a sound is played before the announcement.
        If preannounce_media_id is provided, it overrides the default sound.
        If preannounce_media_id is None, no sound is played.

        Calls async_announce with message and media id.
        """

@@ -202,9 +201,7 @@ class AssistSatelliteEntity(entity.Entity):
            message = ""

        announcement = await self._resolve_announcement_media_id(
            message,
            media_id,
            preannounce_media_id=preannounce_media_id if preannounce else None,
            message, media_id, preannounce_media_id
        )

        if self._is_announcing:

@@ -232,8 +229,7 @@ class AssistSatelliteEntity(entity.Entity):
        start_message: str | None = None,
        start_media_id: str | None = None,
        extra_system_prompt: str | None = None,
        preannounce: bool = True,
        preannounce_media_id: str = PREANNOUNCE_URL,
        preannounce_media_id: str | None = PREANNOUNCE_URL,
    ) -> None:
        """Start a conversation from the satellite.

@@ -243,8 +239,8 @@ class AssistSatelliteEntity(entity.Entity):
        If start_media_id is provided, it is played directly. It is possible
        to omit the message and the satellite will not show any text.

        If preannounce is True, a sound is played before the start message or media.
        If preannounce_media_id is provided, it overrides the default sound.
        If preannounce_media_id is provided, it is played before the announcement.
        If preannounce_media_id is None, no sound is played.

        Calls async_start_conversation.
        """

@@ -261,9 +257,7 @@ class AssistSatelliteEntity(entity.Entity):
            start_message = ""

        announcement = await self._resolve_announcement_media_id(
            start_message,
            start_media_id,
            preannounce_media_id=preannounce_media_id if preannounce else None,
            start_message, start_media_id, preannounce_media_id
        )

        if self._is_announcing:

@@ -15,11 +15,6 @@ announce:
      required: false
      selector:
        text:
    preannounce:
      required: false
      default: true
      selector:
        boolean:
    preannounce_media_id:
      required: false
      selector:

@@ -45,11 +40,6 @@ start_conversation:
      required: false
      selector:
        text:
    preannounce:
      required: false
      default: true
      selector:
        boolean:
    preannounce_media_id:
      required: false
      selector:

@@ -24,13 +24,9 @@
        "name": "Media ID",
        "description": "The media ID to announce instead of using text-to-speech."
      },
      "preannounce": {
        "name": "Preannounce",
        "description": "Play a sound before the announcement."
      },
      "preannounce_media_id": {
        "name": "Preannounce media ID",
        "description": "Custom media ID to play before the announcement."
        "name": "Preannounce Media ID",
        "description": "The media ID to play before the announcement."
      }
    }
  },

@@ -50,13 +46,9 @@
        "name": "Extra system prompt",
        "description": "Provide background information to the AI about the request."
      },
      "preannounce": {
        "name": "Preannounce",
        "description": "Play a sound before the start message or media."
      },
      "preannounce_media_id": {
        "name": "Preannounce media ID",
        "description": "Custom media ID to play before the start message or media."
        "name": "Preannounce Media ID",
        "description": "The media ID to play before the start message or media."
      }
    }
  }

@@ -199,7 +199,7 @@ async def websocket_test_connection(
    hass.async_create_background_task(
        satellite.async_internal_announce(
            media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}",
            preannounce=False,
            preannounce_media_id=None,
        ),
        f"assist_satellite_connection_test_{msg['entity_id']}",
    )

@@ -2,9 +2,10 @@

from __future__ import annotations

from collections.abc import Callable, Mapping
from collections.abc import Callable
from datetime import datetime, timedelta
import logging
from types import MappingProxyType
from typing import Any

from pyasuswrt import AsusWrtError

@@ -362,7 +363,7 @@ class AsusWrtRouter:
        """Add a function to call when router is closed."""
        self._on_close.append(func)

    def update_options(self, new_options: Mapping[str, Any]) -> bool:
    def update_options(self, new_options: MappingProxyType[str, Any]) -> bool:
        """Update router options."""
        req_reload = False
        for name, new_opt in new_options.items():

@@ -28,5 +28,5 @@
  "documentation": "https://www.home-assistant.io/integrations/august",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.6.0"]
  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.7"]
}

@@ -18,7 +18,6 @@ from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_MODE,
    ATTR_NAME,
    CONF_ACTIONS,
    CONF_ALIAS,
    CONF_CONDITIONS,
    CONF_DEVICE_ID,

@@ -28,7 +27,6 @@ from homeassistant.const import (
    CONF_MODE,
    CONF_PATH,
    CONF_PLATFORM,
    CONF_TRIGGERS,
    CONF_VARIABLES,
    CONF_ZONE,
    EVENT_HOMEASSISTANT_STARTED,

@@ -88,9 +86,11 @@ from homeassistant.util.hass_dict import HassKey

from .config import AutomationConfig, ValidationStatus
from .const import (
    CONF_ACTIONS,
    CONF_INITIAL_STATE,
    CONF_TRACE,
    CONF_TRIGGER_VARIABLES,
    CONF_TRIGGERS,
    DEFAULT_INITIAL_STATE,
    DOMAIN,
    LOGGER,

@@ -14,15 +14,11 @@ from homeassistant.components import blueprint
from homeassistant.components.trace import TRACE_CONFIG_SCHEMA
from homeassistant.config import config_per_platform, config_without_domain
from homeassistant.const import (
    CONF_ACTION,
    CONF_ACTIONS,
    CONF_ALIAS,
    CONF_CONDITION,
    CONF_CONDITIONS,
    CONF_DESCRIPTION,
    CONF_ID,
    CONF_TRIGGER,
    CONF_TRIGGERS,
    CONF_VARIABLES,
)
from homeassistant.core import HomeAssistant

@@ -34,10 +30,14 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.util.yaml.input import UndefinedSubstitution

from .const import (
    CONF_ACTION,
    CONF_ACTIONS,
    CONF_HIDE_ENTITY,
    CONF_INITIAL_STATE,
    CONF_TRACE,
    CONF_TRIGGER,
    CONF_TRIGGER_VARIABLES,
    CONF_TRIGGERS,
    DOMAIN,
    LOGGER,
)

@@ -58,9 +58,34 @@ _MINIMAL_PLATFORM_SCHEMA = vol.Schema(
def _backward_compat_schema(value: Any | None) -> Any:
    """Backward compatibility for automations."""

    value = cv.renamed(CONF_TRIGGER, CONF_TRIGGERS)(value)
    value = cv.renamed(CONF_ACTION, CONF_ACTIONS)(value)
    return cv.renamed(CONF_CONDITION, CONF_CONDITIONS)(value)
    if not isinstance(value, dict):
        return value

    # `trigger` has been renamed to `triggers`
    if CONF_TRIGGER in value:
        if CONF_TRIGGERS in value:
            raise vol.Invalid(
                "Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only."
            )
        value[CONF_TRIGGERS] = value.pop(CONF_TRIGGER)

    # `condition` has been renamed to `conditions`
    if CONF_CONDITION in value:
        if CONF_CONDITIONS in value:
            raise vol.Invalid(
                "Cannot specify both 'condition' and 'conditions'. Please use 'conditions' only."
            )
        value[CONF_CONDITIONS] = value.pop(CONF_CONDITION)

    # `action` has been renamed to `actions`
    if CONF_ACTION in value:
        if CONF_ACTIONS in value:
            raise vol.Invalid(
                "Cannot specify both 'action' and 'actions'. Please use 'actions' only."
            )
        value[CONF_ACTIONS] = value.pop(CONF_ACTION)

    return value


PLATFORM_SCHEMA = vol.All(

@@ -2,6 +2,10 @@

import logging

CONF_ACTION = "action"
CONF_ACTIONS = "actions"
CONF_TRIGGER = "trigger"
CONF_TRIGGERS = "triggers"
CONF_TRIGGER_VARIABLES = "trigger_variables"
DOMAIN = "automation"

@@ -4,6 +4,7 @@ from __future__ import annotations

from collections.abc import Mapping
from ipaddress import ip_address
from types import MappingProxyType
from typing import Any
from urllib.parse import urlsplit

@@ -87,7 +88,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):

        if user_input is not None:
            try:
                api = await get_axis_api(self.hass, user_input)
                api = await get_axis_api(self.hass, MappingProxyType(user_input))

            except AuthenticationRequired:
                errors["base"] = "invalid_auth"

@@ -1,7 +1,7 @@
"""Axis network device abstraction."""

from asyncio import timeout
from collections.abc import Mapping
from types import MappingProxyType
from typing import Any

import axis

@@ -23,7 +23,7 @@ from ..errors import AuthenticationRequired, CannotConnect

async def get_axis_api(
    hass: HomeAssistant,
    config: Mapping[str, Any],
    config: MappingProxyType[str, Any],
) -> axis.AxisDevice:
    """Create a Axis device API."""
    session = get_async_client(hass, verify_ssl=False)

@@ -3,10 +3,11 @@
from __future__ import annotations

import asyncio
from collections.abc import Callable, Mapping
from collections.abc import Callable
from datetime import datetime
import json
import logging
from types import MappingProxyType
from typing import Any

from azure.eventhub import EventData, EventDataBatch

@@ -178,7 +179,7 @@ class AzureEventHub:
        await self.async_send(None)
        await self._queue.join()

    def update_options(self, new_options: Mapping[str, Any]) -> None:
    def update_options(self, new_options: MappingProxyType[str, Any]) -> None:
        """Update options."""
        self._send_interval = new_options[CONF_SEND_INTERVAL]


@@ -175,8 +175,7 @@ class AzureStorageBackupAgent(BackupAgent):
        """Find a blob by backup id."""
        async for blob in self._client.list_blobs(include="metadata"):
            if (
                blob.metadata is not None
                and backup_id == blob.metadata.get("backup_id", "")
                backup_id == blob.metadata.get("backup_id", "")
                and blob.metadata.get("metadata_version") == METADATA_VERSION
            ):
                return blob

@@ -2,7 +2,6 @@

from __future__ import annotations

from collections import defaultdict
from dataclasses import dataclass, field, replace
import datetime as dt
from datetime import datetime, timedelta

@@ -88,26 +87,12 @@ class BackupConfigData:
        else:
            time = None
        days = [Day(day) for day in data["schedule"]["days"]]
        agents = {}
        for agent_id, agent_data in data["agents"].items():
            protected = agent_data["protected"]
            stored_retention = agent_data["retention"]
            agent_retention: AgentRetentionConfig | None
            if stored_retention:
                agent_retention = AgentRetentionConfig(
                    copies=stored_retention["copies"],
                    days=stored_retention["days"],
                )
            else:
                agent_retention = None
            agent_config = AgentConfig(
                protected=protected,
                retention=agent_retention,
            )
            agents[agent_id] = agent_config

        return cls(
            agents=agents,
            agents={
                agent_id: AgentConfig(protected=agent_data["protected"])
                for agent_id, agent_data in data["agents"].items()
            },
            automatic_backups_configured=data["automatic_backups_configured"],
            create_backup=CreateBackupConfig(
                agent_ids=data["create_backup"]["agent_ids"],

@@ -191,36 +176,12 @@ class BackupConfig:
        """Update config."""
        if agents is not UNDEFINED:
            for agent_id, agent_config in agents.items():
                agent_retention = agent_config.get("retention")
                if agent_retention is None:
                    new_agent_retention = None
                else:
                    new_agent_retention = AgentRetentionConfig(
                        copies=agent_retention.get("copies"),
                        days=agent_retention.get("days"),
                    )
                if agent_id not in self.data.agents:
                    old_agent_retention = None
                    self.data.agents[agent_id] = AgentConfig(
                        protected=agent_config.get("protected", True),
                        retention=new_agent_retention,
                    )
                    self.data.agents[agent_id] = AgentConfig(**agent_config)
                else:
                    new_agent_config = self.data.agents[agent_id]
                    old_agent_retention = new_agent_config.retention
                    if "protected" in agent_config:
                        new_agent_config = replace(
                            new_agent_config, protected=agent_config["protected"]
                        )
                    if "retention" in agent_config:
                        new_agent_config = replace(
                            new_agent_config, retention=new_agent_retention
                        )
                    self.data.agents[agent_id] = new_agent_config
                if new_agent_retention != old_agent_retention:
                    # There's a single retention application method
                    # for both global and agent retention settings.
                    self.data.retention.apply(self._manager)
                    self.data.agents[agent_id] = replace(
                        self.data.agents[agent_id], **agent_config
                    )
        if automatic_backups_configured is not UNDEFINED:
            self.data.automatic_backups_configured = automatic_backups_configured
        if create_backup is not UNDEFINED:

@@ -246,24 +207,11 @@ class AgentConfig:
    """Represent the config for an agent."""

    protected: bool
    """Agent protected configuration.

    If True, the agent backups are password protected.
    """
    retention: AgentRetentionConfig | None = None
    """Agent retention configuration.

    If None, the global retention configuration is used.
    If not None, the global retention configuration is ignored for this agent.
    If an agent retention configuration is set and both copies and days are None,
    backups will be kept forever for that agent.
    """

    def to_dict(self) -> StoredAgentConfig:
        """Convert agent config to a dict."""
        return {
            "protected": self.protected,
            "retention": self.retention.to_dict() if self.retention else None,
        }


@@ -271,46 +219,24 @@ class StoredAgentConfig(TypedDict):
    """Represent the stored config for an agent."""

    protected: bool
    retention: StoredRetentionConfig | None


class AgentParametersDict(TypedDict, total=False):
    """Represent the parameters for an agent."""

    protected: bool
    retention: RetentionParametersDict | None


@dataclass(kw_only=True)
class BaseRetentionConfig:
    """Represent the base backup retention configuration."""
class RetentionConfig:
    """Represent the backup retention configuration."""

    copies: int | None = None
    days: int | None = None

    def to_dict(self) -> StoredRetentionConfig:
        """Convert backup retention configuration to a dict."""
        return StoredRetentionConfig(
            copies=self.copies,
            days=self.days,
        )


@dataclass(kw_only=True)
class RetentionConfig(BaseRetentionConfig):
    """Represent the backup retention configuration."""

    def apply(self, manager: BackupManager) -> None:
        """Apply backup retention configuration."""
        agents_retention = {
            agent_id: agent_config.retention
            for agent_id, agent_config in manager.config.data.agents.items()
        }

        if self.days is not None or any(
            agent_retention and agent_retention.days is not None
            for agent_retention in agents_retention.values()
        ):
        if self.days is not None:
            LOGGER.debug(
                "Scheduling next automatic delete of backups older than %s in 1 day",
                self.days,

@@ -320,6 +246,13 @@ class RetentionConfig(BaseRetentionConfig):
            LOGGER.debug("Unscheduling next automatic delete")
            self._unschedule_next(manager)

    def to_dict(self) -> StoredRetentionConfig:
        """Convert backup retention configuration to a dict."""
        return StoredRetentionConfig(
            copies=self.copies,
            days=self.days,
        )

    @callback
    def _schedule_next(
        self,

@@ -338,81 +271,16 @@ class RetentionConfig(BaseRetentionConfig):
            """Return backups older than days to delete."""
            # we need to check here since we await before
            # this filter is applied
            agents_retention = {
                agent_id: agent_config.retention
                for agent_id, agent_config in manager.config.data.agents.items()
            }
            has_agents_retention = any(
                agent_retention for agent_retention in agents_retention.values()
            )
            has_agents_retention_days = any(
                agent_retention and agent_retention.days is not None
                for agent_retention in agents_retention.values()
            )
            if (global_days := self.days) is None and not has_agents_retention_days:
                # No global retention days and no agent retention days
            if self.days is None:
                return {}

            now = dt_util.utcnow()
            if global_days is not None and not has_agents_retention:
                # Return early to avoid the longer filtering below.
                return {
                    backup_id: backup
                    for backup_id, backup in backups.items()
                    if dt_util.parse_datetime(backup.date, raise_on_error=True)
                    + timedelta(days=global_days)
                    < now
                }

            # If there are any agent retention settings, we need to check
            # the retention settings, for every backup and agent combination.

            backups_to_delete = {}

            for backup_id, backup in backups.items():
                backup_date = dt_util.parse_datetime(
                    backup.date, raise_on_error=True
                )
                delete_from_agents = set(backup.agents)
                for agent_id in backup.agents:
                    agent_retention = agents_retention.get(agent_id)
                    if agent_retention is None:
                        # This agent does not have a retention setting,
                        # so the global retention setting should be used.
                        if global_days is None:
                            # This agent does not have a retention setting
                            # and the global retention days setting is None,
                            # so this backup should not be deleted.
                            delete_from_agents.discard(agent_id)
                            continue
                        days = global_days
                    elif (agent_days := agent_retention.days) is None:
                        # This agent has a retention setting
                        # where days is set to None,
                        # so the backup should not be deleted.
                        delete_from_agents.discard(agent_id)
                        continue
                    else:
                        # This agent has a retention setting
                        # where days is set to a number,
                        # so that setting should be used.
                        days = agent_days
                    if backup_date + timedelta(days=days) >= now:
                        # This backup is not older than the retention days,
                        # so this agent should not be deleted.
                        delete_from_agents.discard(agent_id)

                filtered_backup = replace(
                    backup,
                    agents={
                        agent_id: agent_backup_status
                        for agent_id, agent_backup_status in backup.agents.items()
                        if agent_id in delete_from_agents
                    },
                )
                backups_to_delete[backup_id] = filtered_backup

            return backups_to_delete
            return {
                backup_id: backup
                for backup_id, backup in backups.items()
                if dt_util.parse_datetime(backup.date, raise_on_error=True)
                + timedelta(days=self.days)
                < now
            }

        await manager.async_delete_filtered_backups(
            include_filter=_automatic_backups_filter, delete_filter=_delete_filter

@@ -444,10 +312,6 @@ class RetentionParametersDict(TypedDict, total=False):
    days: int | None


class AgentRetentionConfig(BaseRetentionConfig):
    """Represent an agent retention configuration."""


class StoredBackupSchedule(TypedDict):
    """Represent the stored backup schedule configuration."""

@@ -690,87 +554,16 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N
        backups: dict[str, ManagerBackup],
    ) -> dict[str, ManagerBackup]:
        """Return oldest backups more numerous than copies to delete."""
        agents_retention = {
            agent_id: agent_config.retention
            for agent_id, agent_config in manager.config.data.agents.items()
        }
        has_agents_retention = any(
            agent_retention for agent_retention in agents_retention.values()
        )
        has_agents_retention_copies = any(
            agent_retention and agent_retention.copies is not None
            for agent_retention in agents_retention.values()
        )
        # we need to check here since we await before
        # this filter is applied
        if (
            global_copies := manager.config.data.retention.copies
        ) is None and not has_agents_retention_copies:
            # No global retention copies and no agent retention copies
        if manager.config.data.retention.copies is None:
            return {}
        if global_copies is not None and not has_agents_retention:
            # Return early to avoid the longer filtering below.
            return dict(
                sorted(
                    backups.items(),
                    key=lambda backup_item: backup_item[1].date,
                )[: max(len(backups) - global_copies, 0)]
            )

        backups_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(dict)
        for backup_id, backup in backups.items():
            for agent_id in backup.agents:
                backups_by_agent[agent_id][backup_id] = backup

        backups_to_delete_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(
            dict
        return dict(
            sorted(
                backups.items(),
                key=lambda backup_item: backup_item[1].date,
            )[: max(len(backups) - manager.config.data.retention.copies, 0)]
        )
        for agent_id, agent_backups in backups_by_agent.items():
            agent_retention = agents_retention.get(agent_id)
            if agent_retention is None:
                # This agent does not have a retention setting,
                # so the global retention setting should be used.
                if global_copies is None:
                    # This agent does not have a retention setting
                    # and the global retention copies setting is None,
                    # so backups should not be deleted.
                    continue
                # The global retention setting will be used.
                copies = global_copies
            elif (agent_copies := agent_retention.copies) is None:
                # This agent has a retention setting
                # where copies is set to None,
                # so backups should not be deleted.
                continue
            else:
                # This agent retention setting will be used.
                copies = agent_copies

            backups_to_delete_by_agent[agent_id] = dict(
                sorted(
                    agent_backups.items(),
                    key=lambda backup_item: backup_item[1].date,
                )[: max(len(agent_backups) - copies, 0)]
            )

        backup_ids_to_delete: dict[str, set[str]] = defaultdict(set)
        for agent_id, to_delete in backups_to_delete_by_agent.items():
            for backup_id in to_delete:
                backup_ids_to_delete[backup_id].add(agent_id)
        backups_to_delete: dict[str, ManagerBackup] = {}
        for backup_id, agent_ids in backup_ids_to_delete.items():
            backup = backups[backup_id]
            # filter the backup to only include the agents that should be deleted
            filtered_backup = replace(
                backup,
                agents={
                    agent_id: agent_backup_status
                    for agent_id, agent_backup_status in backup.agents.items()
                    if agent_id in agent_ids
                },
            )
            backups_to_delete[backup_id] = filtered_backup
        return backups_to_delete

    await manager.async_delete_filtered_backups(
        include_filter=_automatic_backups_filter, delete_filter=_delete_filter

@@ -1,136 +0,0 @@
"""Backup onboarding views."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from functools import wraps
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Concatenate

from aiohttp import web
from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol

from homeassistant.components.http import KEY_HASS
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.components.onboarding import (
    BaseOnboardingView,
    NoAuthBaseOnboardingView,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager

from . import BackupManager, Folder, IncorrectPasswordError, http as backup_http

if TYPE_CHECKING:
    from homeassistant.components.onboarding import OnboardingStoreData


async def async_setup_views(hass: HomeAssistant, data: OnboardingStoreData) -> None:
    """Set up the backup views."""

    hass.http.register_view(BackupInfoView(data))
    hass.http.register_view(RestoreBackupView(data))
    hass.http.register_view(UploadBackupView(data))


def with_backup_manager[_ViewT: BaseOnboardingView, **_P](
    func: Callable[
        Concatenate[_ViewT, BackupManager, web.Request, _P],
        Coroutine[Any, Any, web.Response],
    ],
) -> Callable[Concatenate[_ViewT, web.Request, _P], Coroutine[Any, Any, web.Response]]:
    """Home Assistant API decorator to check onboarding and inject manager."""

    @wraps(func)
    async def with_backup(
        self: _ViewT,
        request: web.Request,
        *args: _P.args,
        **kwargs: _P.kwargs,
    ) -> web.Response:
        """Check admin and call function."""
        if self._data["done"]:
            raise HTTPUnauthorized

        manager = await async_get_backup_manager(request.app[KEY_HASS])
        return await func(self, manager, request, *args, **kwargs)

    return with_backup


class BackupInfoView(NoAuthBaseOnboardingView):
    """Get backup info view."""

    url = "/api/onboarding/backup/info"
    name = "api:onboarding:backup:info"

    @with_backup_manager
    async def get(self, manager: BackupManager, request: web.Request) -> web.Response:
        """Return backup info."""
        backups, _ = await manager.async_get_backups()
        return self.json(
            {
                "backups": list(backups.values()),
                "state": manager.state,
                "last_action_event": manager.last_action_event,
            }
        )


class RestoreBackupView(NoAuthBaseOnboardingView):
    """Restore backup view."""

    url = "/api/onboarding/backup/restore"
    name = "api:onboarding:backup:restore"

    @RequestDataValidator(
        vol.Schema(
            {
                vol.Required("backup_id"): str,
                vol.Required("agent_id"): str,
                vol.Optional("password"): str,
                vol.Optional("restore_addons"): [str],
                vol.Optional("restore_database", default=True): bool,
                vol.Optional("restore_folders"): [vol.Coerce(Folder)],
            }
        )
    )
    @with_backup_manager
    async def post(
        self, manager: BackupManager, request: web.Request, data: dict[str, Any]
    ) -> web.Response:
        """Restore a backup."""
        try:
            await manager.async_restore_backup(
                data["backup_id"],
                agent_id=data["agent_id"],
                password=data.get("password"),
                restore_addons=data.get("restore_addons"),
                restore_database=data["restore_database"],
                restore_folders=data.get("restore_folders"),
                restore_homeassistant=True,
            )
        except IncorrectPasswordError:
            return self.json(
                {"code": "incorrect_password"}, status_code=HTTPStatus.BAD_REQUEST
            )
        except HomeAssistantError as err:
            return self.json(
                {"code": "restore_failed", "message": str(err)},
                status_code=HTTPStatus.BAD_REQUEST,
            )
        return web.Response(status=HTTPStatus.OK)


class UploadBackupView(NoAuthBaseOnboardingView, backup_http.UploadBackupView):
    """Upload backup view."""

    url = "/api/onboarding/backup/upload"
    name = "api:onboarding:backup:upload"

    @with_backup_manager
    async def post(self, manager: BackupManager, request: web.Request) -> web.Response:
        """Upload a backup file."""
        return await self._post(request)

@@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 6
STORAGE_VERSION_MINOR = 5


class StoredBackupData(TypedDict):

@@ -72,10 +72,6 @@ class _BackupStore(Store[StoredBackupData]):
                data["config"]["automatic_backups_configured"] = (
                    data["config"]["create_backup"]["password"] is not None
                )
            if old_minor_version < 6:
                # Version 1.6 adds agent retention settings
                for agent in data["config"]["agents"]:
                    data["config"]["agents"][agent]["retention"] = None

        # Note: We allow reading data with major version 2.
        # Reject if major version is higher than 2.

@@ -346,28 +346,7 @@ async def handle_config_info(
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/config/update",
        vol.Optional("agents"): vol.Schema(
            {
                str: {
                    vol.Optional("protected"): bool,
                    vol.Optional("retention"): vol.Any(
                        vol.Schema(
                            {
                                # Note: We can't use cv.positive_int because it allows 0 even
                                # though 0 is not positive.
                                vol.Optional("copies"): vol.Any(
                                    vol.All(int, vol.Range(min=1)), None
                                ),
                                vol.Optional("days"): vol.Any(
                                    vol.All(int, vol.Range(min=1)), None
                                ),
                            },
                        ),
                        None,
                    ),
                }
            }
        ),
        vol.Optional("agents"): vol.Schema({str: {"protected": bool}}),
        vol.Optional("automatic_backups_configured"): bool,
        vol.Optional("create_backup"): vol.Schema(
            {

@@ -31,7 +31,7 @@
      "state_attributes": {
        "preset_mode": {
          "state": {
            "auto": "[%key:common::state::auto%]"
            "auto": "[%key:component::climate::entity_component::_::state_attributes::fan_mode::state::auto%]"
          }
        }
      }

@@ -103,8 +103,8 @@
      "temperature_range": {
        "name": "Temperature range",
        "state": {
          "low": "[%key:common::state::low%]",
          "high": "[%key:common::state::high%]"
          "low": "Low",
          "high": "High"
        }
      }
    },

@@ -124,8 +124,8 @@
    "battery": {
      "name": "Battery",
      "state": {
        "off": "[%key:common::state::normal%]",
        "on": "[%key:common::state::low%]"
        "off": "Normal",
        "on": "Low"
      }
    },
    "battery_charging": {

@@ -145,7 +145,7 @@
    "cold": {
      "name": "Cold",
      "state": {
        "off": "[%key:common::state::normal%]",
        "off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
        "on": "Cold"
      }
    },

@@ -180,7 +180,7 @@
    "heat": {
      "name": "Heat",
      "state": {
        "off": "[%key:common::state::normal%]",
        "off": "[%key:component::binary_sensor::entity_component::battery::state::off%]",
        "on": "Hot"
      }
    },

@@ -30,7 +30,7 @@
          "available": "Available",
          "charging": "[%key:common::state::charging%]",
          "unavailable": "Unavailable",
          "error": "[%key:common::state::error%]",
          "error": "Error",
          "offline": "Offline"
        }
      },

@@ -41,7 +41,7 @@
          "vehicle_detected": "Detected",
          "ready": "Ready",
          "no_power": "No power",
          "vehicle_error": "[%key:common::state::error%]"
          "vehicle_error": "Error"
        }
      },
      "actual_v1": {

@@ -12,5 +12,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/bluemaestro",
  "iot_class": "local_push",
  "requirements": ["bluemaestro-ble==0.4.0"]
  "requirements": ["bluemaestro-ble==0.2.3"]
}

@@ -6,7 +6,7 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/bluesound",
  "iot_class": "local_polling",
  "requirements": ["pyblu==2.0.1"],
  "requirements": ["pyblu==2.0.0"],
  "zeroconf": [
    {
      "type": "_musc._tcp.local."

@@ -330,12 +330,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity

        if self._status.input_id is not None:
            for input_ in self._inputs:
                # the input might not have an id => also try to match on the stream_url/url
                # we have to use both because neither matches all the time
                if (
                    input_.id == self._status.input_id
                    or input_.url == self._status.stream_url
                ):
                if input_.id == self._status.input_id:
                    return input_.text

        for preset in self._presets:

@@ -506,16 +501,18 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
            return

        # presets and inputs might have the same name; presets have priority
        url: str | None = None
        for input_ in self._inputs:
            if input_.text == source:
                await self._player.play_url(input_.url)
                return
                url = input_.url
        for preset in self._presets:
            if preset.name == source:
                await self._player.load_preset(preset.id)
                return
                url = preset.url

        raise ServiceValidationError(f"Source {source} not found")
        if url is None:
            raise ServiceValidationError(f"Source {source} not found")

        await self._player.play_url(url)

    async def async_clear_playlist(self) -> None:
        """Clear players playlist."""

@@ -18,9 +18,9 @@
    "bleak==0.22.3",
    "bleak-retry-connector==3.9.0",
    "bluetooth-adapters==0.21.4",
    "bluetooth-auto-recovery==1.5.1",
    "bluetooth-data-tools==1.28.1",
    "bluetooth-auto-recovery==1.4.5",
    "bluetooth-data-tools==1.26.1",
    "dbus-fast==2.43.0",
    "habluetooth==3.47.1"
    "habluetooth==3.37.0"
  ]
}

@@ -374,27 +374,6 @@ class PassiveBluetoothProcessorCoordinator[_DataT](BasePassiveBluetoothCoordinat
            self.logger.exception("Unexpected error updating %s data", self.name)
            return

        self._process_update(update, was_available)

    @callback
    def async_set_updated_data(self, update: _DataT) -> None:
        """Manually update the processor with new data.

        If the data comes in via a different method, like a
        notification, this method can be used to update the
        processor with the new data.

        This is useful for devices that retrieve
        some of their data via notifications.
        """
        was_available = self._available
        self._available = True
        self._process_update(update, was_available)

    def _process_update(
        self, update: _DataT, was_available: bool | None = None
    ) -> None:
        """Process the update from the bluetooth device."""
        if not self.last_update_success:
            self.last_update_success = True
            self.logger.info("Coordinator %s recovered", self.name)

@@ -2,7 +2,7 @@

from __future__ import annotations

from habluetooth import (
from bluetooth_adapters import (
    DiscoveredDeviceAdvertisementData,
    DiscoveredDeviceAdvertisementDataDict,
    DiscoveryStorageType,

@@ -139,7 +139,7 @@
        "state": {
          "default": "Default",
          "charging": "[%key:common::state::charging%]",
          "error": "[%key:common::state::error%]",
          "error": "Error",
          "complete": "Complete",
          "fully_charged": "Fully charged",
          "finished_fully_charged": "Finished, fully charged",

@@ -16,7 +16,6 @@ from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import DOMAIN

@@ -92,22 +91,11 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):

        self._discovered[CONF_ACCESS_TOKEN] = token
        try:
            bond_id, hub_name = await _validate_input(self.hass, self._discovered)
            _, hub_name = await _validate_input(self.hass, self._discovered)
        except InputValidationError:
            return
        await self.async_set_unique_id(bond_id)
        self._abort_if_unique_id_configured(updates={CONF_HOST: host})
        self._discovered[CONF_NAME] = hub_name

    async def async_step_dhcp(
        self, discovery_info: DhcpServiceInfo
    ) -> ConfigFlowResult:
        """Handle a flow initialized by dhcp discovery."""
        host = discovery_info.ip
        bond_id = discovery_info.hostname.partition("-")[2].upper()
        await self.async_set_unique_id(bond_id)
        return await self.async_step_any_discovery(bond_id, host)

    async def async_step_zeroconf(
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:

@@ -116,17 +104,11 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
        host: str = discovery_info.host
        bond_id = name.partition(".")[0]
        await self.async_set_unique_id(bond_id)
        return await self.async_step_any_discovery(bond_id, host)

    async def async_step_any_discovery(
        self, bond_id: str, host: str
    ) -> ConfigFlowResult:
        """Handle a flow initialized by discovery."""
        for entry in self._async_current_entries():
            if entry.unique_id != bond_id:
                continue
            updates = {CONF_HOST: host}
            if entry.state is ConfigEntryState.SETUP_ERROR and (
            if entry.state == ConfigEntryState.SETUP_ERROR and (
                token := await async_get_token(self.hass, host)
            ):
                updates[CONF_ACCESS_TOKEN] = token

@@ -171,14 +153,10 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
                CONF_HOST: self._discovered[CONF_HOST],
            }
            try:
                bond_id, hub_name = await _validate_input(self.hass, data)
                _, hub_name = await _validate_input(self.hass, data)
            except InputValidationError as error:
                errors["base"] = error.base
            else:
                await self.async_set_unique_id(bond_id)
                self._abort_if_unique_id_configured(
                    updates={CONF_HOST: self._discovered[CONF_HOST]}
                )
                return self.async_create_entry(
                    title=hub_name,
                    data=data,

@@ -207,10 +185,8 @@ class BondConfigFlow(ConfigFlow, domain=DOMAIN):
            except InputValidationError as error:
                errors["base"] = error.base
            else:
                await self.async_set_unique_id(bond_id, raise_on_progress=False)
                self._abort_if_unique_id_configured(
                    updates={CONF_HOST: user_input[CONF_HOST]}
                )
                await self.async_set_unique_id(bond_id)
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title=hub_name, data=user_input)

        return self.async_show_form(

@@ -3,16 +3,6 @@
  "name": "Bond",
  "codeowners": ["@bdraco", "@prystupa", "@joshs85", "@marciogranzotto"],
  "config_flow": true,
  "dhcp": [
    {
      "hostname": "bond-*",
      "macaddress": "3C6A2C1*"
    },
    {
      "hostname": "bond-*",
      "macaddress": "F44E38*"
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/bond",
  "iot_class": "local_push",
  "loggers": ["bond_async"],

@@ -9,12 +9,12 @@ from bosch_alarm_mode2 import Panel
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL, Platform.SENSOR]
|
||||
PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL]
|
||||
|
||||
type BoschAlarmConfigEntry = ConfigEntry[Panel]
|
||||
|
||||
@@ -34,15 +34,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -
|
||||
await panel.connect()
|
||||
except (PermissionError, ValueError) as err:
|
||||
await panel.disconnect()
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN, translation_key="authentication_failed"
|
||||
) from err
|
||||
raise ConfigEntryNotReady from err
|
||||
except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err:
|
||||
await panel.disconnect()
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="cannot_connect",
|
||||
) from err
|
||||
raise ConfigEntryNotReady("Connection failed") from err
|
||||
|
||||
entry.runtime_data = panel
|
||||
|
||||
|
||||
@@ -10,10 +10,11 @@ from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelState,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BoschAlarmConfigEntry
|
||||
from .entity import BoschAlarmAreaEntity
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -34,7 +35,7 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
class AreaAlarmControlPanel(BoschAlarmAreaEntity, AlarmControlPanelEntity):
|
||||
class AreaAlarmControlPanel(AlarmControlPanelEntity):
|
||||
"""An alarm control panel entity for a bosch alarm panel."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
@@ -47,8 +48,19 @@ class AreaAlarmControlPanel(BoschAlarmAreaEntity, AlarmControlPanelEntity):
|
||||
|
||||
def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None:
|
||||
"""Initialise a Bosch Alarm control panel entity."""
|
||||
super().__init__(panel, area_id, unique_id, False, False, True)
|
||||
self._attr_unique_id = self._area_unique_id
|
||||
self.panel = panel
|
||||
self._area = panel.areas[area_id]
|
||||
self._area_id = area_id
|
||||
self._attr_unique_id = f"{unique_id}_area_{area_id}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._attr_unique_id)},
|
||||
name=self._area.name,
|
||||
manufacturer="Bosch Security Systems",
|
||||
via_device=(
|
||||
DOMAIN,
|
||||
unique_id,
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
def alarm_state(self) -> AlarmControlPanelState | None:
|
||||
@@ -78,3 +90,20 @@ class AreaAlarmControlPanel(BoschAlarmAreaEntity, AlarmControlPanelEntity):
|
||||
async def async_alarm_arm_away(self, code: str | None = None) -> None:
|
||||
"""Send arm away command."""
|
||||
await self.panel.area_arm_all(self._area_id)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return self.panel.connection_status()
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity attached to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self._area.status_observer.attach(self.schedule_update_ha_state)
|
||||
self.panel.connection_status_observer.attach(self.schedule_update_ha_state)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
self._area.status_observer.detach(self.schedule_update_ha_state)
|
||||
self.panel.connection_status_observer.detach(self.schedule_update_ha_state)
|
||||
|
||||
@@ -3,7 +3,6 @@
from __future__ import annotations

import asyncio
from collections.abc import Mapping
import logging
import ssl
from typing import Any
@@ -11,12 +10,7 @@ from typing import Any
from bosch_alarm_mode2 import Panel
import voluptuous as vol

from homeassistant.config_entries import (
    SOURCE_RECONFIGURE,
    SOURCE_USER,
    ConfigFlow,
    ConfigFlowResult,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
    CONF_CODE,
    CONF_HOST,
@@ -113,13 +107,6 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
        else:
            self._data = user_input
            self._data[CONF_MODEL] = model

            if self.source == SOURCE_RECONFIGURE:
                if (
                    self._get_reconfigure_entry().data[CONF_MODEL]
                    != self._data[CONF_MODEL]
                ):
                    return self.async_abort(reason="device_mismatch")
            return await self.async_step_auth()
        return self.async_show_form(
            step_id="user",
@@ -129,12 +116,6 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
            errors=errors,
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the reconfigure step."""
        return await self.async_step_user()

    async def async_step_auth(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
@@ -172,77 +153,13 @@ class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):
        else:
            if serial_number:
                await self.async_set_unique_id(str(serial_number))
            if self.source == SOURCE_USER:
                if serial_number:
                    self._abort_if_unique_id_configured()
                else:
                    self._async_abort_entries_match(
                        {CONF_HOST: self._data[CONF_HOST]}
                    )
                return self.async_create_entry(
                    title=f"Bosch {model}", data=self._data
                )
            if serial_number:
                self._abort_if_unique_id_mismatch(reason="device_mismatch")
            return self.async_update_reload_and_abort(
                self._get_reconfigure_entry(),
                data=self._data,
            )
                self._abort_if_unique_id_configured()
            else:
                self._async_abort_entries_match({CONF_HOST: self._data[CONF_HOST]})
            return self.async_create_entry(title=f"Bosch {model}", data=self._data)

        return self.async_show_form(
            step_id="auth",
            data_schema=self.add_suggested_values_to_schema(schema, user_input),
            errors=errors,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth upon an authentication error."""
        self._data = dict(entry_data)
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the reauth step."""
        errors: dict[str, str] = {}

        # Each model variant requires a different authentication flow
        if "Solution" in self._data[CONF_MODEL]:
            schema = STEP_AUTH_DATA_SCHEMA_SOLUTION
        elif "AMAX" in self._data[CONF_MODEL]:
            schema = STEP_AUTH_DATA_SCHEMA_AMAX
        else:
            schema = STEP_AUTH_DATA_SCHEMA_BG

        if user_input is not None:
            reauth_entry = self._get_reauth_entry()
            self._data.update(user_input)
            try:
                (_, _) = await try_connect(self._data, Panel.LOAD_EXTENDED_INFO)
            except (PermissionError, ValueError) as e:
                errors["base"] = "invalid_auth"
                _LOGGER.error("Authentication Error: %s", e)
            except (
                OSError,
                ConnectionRefusedError,
                ssl.SSLError,
                TimeoutError,
            ) as e:
                _LOGGER.error("Connection Error: %s", e)
                errors["base"] = "cannot_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    data_updates=user_input,
                )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=self.add_suggested_values_to_schema(schema, user_input),
            errors=errors,
        )

@@ -1,73 +0,0 @@
"""Diagnostics for bosch alarm."""

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant

from . import BoschAlarmConfigEntry
from .const import CONF_INSTALLER_CODE, CONF_USER_CODE

TO_REDACT = [CONF_INSTALLER_CODE, CONF_USER_CODE, CONF_PASSWORD]


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: BoschAlarmConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""

    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT),
        "data": {
            "model": entry.runtime_data.model,
            "serial_number": entry.runtime_data.serial_number,
            "protocol_version": entry.runtime_data.protocol_version,
            "firmware_version": entry.runtime_data.firmware_version,
            "areas": [
                {
                    "id": area_id,
                    "name": area.name,
                    "all_ready": area.all_ready,
                    "part_ready": area.part_ready,
                    "faults": area.faults,
                    "alarms": area.alarms,
                    "disarmed": area.is_disarmed(),
                    "arming": area.is_arming(),
                    "pending": area.is_pending(),
                    "part_armed": area.is_part_armed(),
                    "all_armed": area.is_all_armed(),
                    "armed": area.is_armed(),
                    "triggered": area.is_triggered(),
                }
                for area_id, area in entry.runtime_data.areas.items()
            ],
            "points": [
                {
                    "id": point_id,
                    "name": point.name,
                    "open": point.is_open(),
                    "normal": point.is_normal(),
                }
                for point_id, point in entry.runtime_data.points.items()
            ],
            "doors": [
                {
                    "id": door_id,
                    "name": door.name,
                    "open": door.is_open(),
                    "locked": door.is_locked(),
                }
                for door_id, door in entry.runtime_data.doors.items()
            ],
            "outputs": [
                {
                    "id": output_id,
                    "name": output.name,
                    "active": output.is_active(),
                }
                for output_id, output in entry.runtime_data.outputs.items()
            ],
            "history_events": entry.runtime_data.events,
        },
    }
@@ -1,88 +0,0 @@
"""Base entity support for a bosch alarm panel."""

from __future__ import annotations

from bosch_alarm_mode2 import Panel

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity

from .const import DOMAIN

PARALLEL_UPDATES = 0


class BoschAlarmEntity(Entity):
    """A base entity for a bosch alarm panel."""

    _attr_has_entity_name = True

    def __init__(self, panel: Panel, unique_id: str) -> None:
        """Set up an entity for a bosch alarm panel."""
        self.panel = panel
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, unique_id)},
            name=f"Bosch {panel.model}",
            manufacturer="Bosch Security Systems",
        )

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self.panel.connection_status()

    async def async_added_to_hass(self) -> None:
        """Observe state changes."""
        self.panel.connection_status_observer.attach(self.schedule_update_ha_state)

    async def async_will_remove_from_hass(self) -> None:
        """Stop observing state changes."""
        self.panel.connection_status_observer.detach(self.schedule_update_ha_state)


class BoschAlarmAreaEntity(BoschAlarmEntity):
    """A base entity for area related entities within a bosch alarm panel."""

    def __init__(
        self,
        panel: Panel,
        area_id: int,
        unique_id: str,
        observe_alarms: bool,
        observe_ready: bool,
        observe_status: bool,
    ) -> None:
        """Set up an area-related entity for a bosch alarm panel."""
        super().__init__(panel, unique_id)
        self._area_id = area_id
        self._area_unique_id = f"{unique_id}_area_{area_id}"
        self._observe_alarms = observe_alarms
        self._observe_ready = observe_ready
        self._observe_status = observe_status
        self._area = panel.areas[area_id]
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._area_unique_id)},
            name=self._area.name,
            manufacturer="Bosch Security Systems",
            via_device=(DOMAIN, unique_id),
        )

    async def async_added_to_hass(self) -> None:
        """Observe state changes."""
        await super().async_added_to_hass()
        if self._observe_alarms:
            self._area.alarm_observer.attach(self.schedule_update_ha_state)
        if self._observe_ready:
            self._area.ready_observer.attach(self.schedule_update_ha_state)
        if self._observe_status:
            self._area.status_observer.attach(self.schedule_update_ha_state)

    async def async_will_remove_from_hass(self) -> None:
        """Stop observing state changes."""
        await super().async_will_remove_from_hass()
        if self._observe_alarms:
            self._area.alarm_observer.detach(self.schedule_update_ha_state)
        if self._observe_ready:
            self._area.ready_observer.detach(self.schedule_update_ha_state)
        if self._observe_status:
            self._area.status_observer.detach(self.schedule_update_ha_state)
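The entity base classes above lean on a small observer contract from bosch_alarm_mode2: an observable exposes attach()/detach() and invokes every attached callback when its state changes, which is what drives schedule_update_ha_state. A self-contained sketch of that contract; the Observable class below is an illustrative stand-in, not the library's actual implementation:

# Illustrative stand-in for the attach/detach observer contract used above;
# bosch_alarm_mode2's real observable may differ.
from collections.abc import Callable


class Observable:
    """Invoke every attached callback when notify() fires."""

    def __init__(self) -> None:
        self._observers: list[Callable[[], None]] = []

    def attach(self, observer: Callable[[], None]) -> None:
        self._observers.append(observer)

    def detach(self, observer: Callable[[], None]) -> None:
        self._observers.remove(observer)

    def notify(self) -> None:
        # Copy the list so a callback may detach itself during iteration.
        for observer in list(self._observers):
            observer()

Attaching in async_added_to_hass and detaching in async_will_remove_from_hass keeps these callbacks from firing against entities Home Assistant has already removed.
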
@@ -1,9 +0,0 @@
{
  "entity": {
    "sensor": {
      "faulting_points": {
        "default": "mdi:alert-circle-outline"
      }
    }
  }
}
@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "quality_scale": "bronze",
  "requirements": ["bosch-alarm-mode2==0.4.6"]
  "requirements": ["bosch-alarm-mode2==0.4.3"]
}

@@ -40,7 +40,7 @@ rules:
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates: todo
  reauthentication-flow: done
  reauthentication-flow: todo
  test-coverage: done

  # Gold
@@ -62,9 +62,9 @@ rules:
  entity-category: todo
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: done
  entity-translations: todo
  exception-translations: todo
  icon-translations: done
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues:
    status: exempt

@@ -1,86 +0,0 @@
"""Support for Bosch Alarm Panel History as a sensor."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from bosch_alarm_mode2 import Panel
from bosch_alarm_mode2.panel import Area

from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import BoschAlarmConfigEntry
from .entity import BoschAlarmAreaEntity


@dataclass(kw_only=True, frozen=True)
class BoschAlarmSensorEntityDescription(SensorEntityDescription):
    """Describes Bosch Alarm sensor entity."""

    value_fn: Callable[[Area], int]
    observe_alarms: bool = False
    observe_ready: bool = False
    observe_status: bool = False


SENSOR_TYPES: list[BoschAlarmSensorEntityDescription] = [
    BoschAlarmSensorEntityDescription(
        key="faulting_points",
        translation_key="faulting_points",
        value_fn=lambda area: area.faults,
        observe_ready=True,
    ),
]


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: BoschAlarmConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up bosch alarm sensors."""

    panel = config_entry.runtime_data
    unique_id = config_entry.unique_id or config_entry.entry_id

    async_add_entities(
        BoschAreaSensor(panel, area_id, unique_id, template)
        for area_id in panel.areas
        for template in SENSOR_TYPES
    )


PARALLEL_UPDATES = 0


class BoschAreaSensor(BoschAlarmAreaEntity, SensorEntity):
    """An area sensor entity for a bosch alarm panel."""

    entity_description: BoschAlarmSensorEntityDescription

    def __init__(
        self,
        panel: Panel,
        area_id: int,
        unique_id: str,
        entity_description: BoschAlarmSensorEntityDescription,
    ) -> None:
        """Set up an area sensor entity for a bosch alarm panel."""
        super().__init__(
            panel,
            area_id,
            unique_id,
            entity_description.observe_alarms,
            entity_description.observe_ready,
            entity_description.observe_status,
        )
        self.entity_description = entity_description
        self._attr_unique_id = f"{self._area_unique_id}_{entity_description.key}"

    @property
    def native_value(self) -> int:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(self._area)
@@ -22,18 +22,6 @@
        "installer_code": "The installer code from your panel",
        "user_code": "The user code from your panel"
      }
    },
    "reauth_confirm": {
      "data": {
        "password": "[%key:common::config_flow::data::password%]",
        "installer_code": "[%key:component::bosch_alarm::config::step::auth::data::installer_code%]",
        "user_code": "[%key:component::bosch_alarm::config::step::auth::data::user_code%]"
      },
      "data_description": {
        "password": "[%key:component::bosch_alarm::config::step::auth::data_description::password%]",
        "installer_code": "[%key:component::bosch_alarm::config::step::auth::data_description::installer_code%]",
        "user_code": "[%key:component::bosch_alarm::config::step::auth::data_description::user_code%]"
      }
    }
  },
  "error": {
@@ -42,26 +30,7 @@
    "unknown": "[%key:common::config_flow::error::unknown%]"
  },
  "abort": {
    "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
    "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
    "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
    "device_mismatch": "Please ensure you reconfigure against the same device."
  }
},
"exceptions": {
  "cannot_connect": {
    "message": "Could not connect to panel."
  },
  "authentication_failed": {
    "message": "Incorrect credentials for panel."
  }
},
"entity": {
  "sensor": {
    "faulting_points": {
      "name": "Faulting points",
      "unit_of_measurement": "points"
    }
    "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
  }
}
}

@@ -13,7 +13,7 @@
      },
      "data_description": {
        "email": "The email address associated with your Bring! account.",
        "password": "The password to log in to your Bring! account."
        "password": "The password to login to your Bring! account."
      }
    },
    "reauth_confirm": {

@@ -12,7 +12,6 @@ from buienradar.constants import (
    CONDITION,
    CONTENT,
    DATA,
    FEELTEMPERATURE,
    FORECAST,
    HUMIDITY,
    MESSAGE,
@@ -23,7 +22,6 @@ from buienradar.constants import (
    TEMPERATURE,
    VISIBILITY,
    WINDAZIMUTH,
    WINDGUST,
    WINDSPEED,
)
from buienradar.urls import JSON_FEED_URL, json_precipitation_forecast_url
@@ -202,14 +200,6 @@ class BrData:
        except (ValueError, TypeError):
            return None

    @property
    def feeltemperature(self):
        """Return the feeltemperature, or None."""
        try:
            return float(self.data.get(FEELTEMPERATURE))
        except (ValueError, TypeError):
            return None

    @property
    def pressure(self):
        """Return the pressure, or None."""
@@ -234,14 +224,6 @@ class BrData:
        except (ValueError, TypeError):
            return None

    @property
    def wind_gust(self):
        """Return the windgust, or None."""
        try:
            return float(self.data.get(WINDGUST))
        except (ValueError, TypeError):
            return None

    @property
    def wind_speed(self):
        """Return the windspeed, or None."""

@@ -9,7 +9,6 @@ from buienradar.constants import (
    MAX_TEMP,
    MIN_TEMP,
    RAIN,
    RAIN_CHANCE,
    WINDAZIMUTH,
    WINDSPEED,
)
@@ -34,7 +33,6 @@ from homeassistant.components.weather import (
    ATTR_FORECAST_NATIVE_TEMP,
    ATTR_FORECAST_NATIVE_TEMP_LOW,
    ATTR_FORECAST_NATIVE_WIND_SPEED,
    ATTR_FORECAST_PRECIPITATION_PROBABILITY,
    ATTR_FORECAST_TIME,
    ATTR_FORECAST_WIND_BEARING,
    Forecast,
@@ -155,9 +153,7 @@ class BrWeather(WeatherEntity):
        )
        self._attr_native_pressure = data.pressure
        self._attr_native_temperature = data.temperature
        self._attr_native_apparent_temperature = data.feeltemperature
        self._attr_native_visibility = data.visibility
        self._attr_native_wind_gust_speed = data.wind_gust
        self._attr_native_wind_speed = data.wind_speed
        self._attr_wind_bearing = data.wind_bearing

@@ -192,7 +188,6 @@ class BrWeather(WeatherEntity):
            ATTR_FORECAST_NATIVE_TEMP_LOW: data_in.get(MIN_TEMP),
            ATTR_FORECAST_NATIVE_TEMP: data_in.get(MAX_TEMP),
            ATTR_FORECAST_NATIVE_PRECIPITATION: data_in.get(RAIN),
            ATTR_FORECAST_PRECIPITATION_PROBABILITY: data_in.get(RAIN_CHANCE),
            ATTR_FORECAST_WIND_BEARING: data_in.get(WINDAZIMUTH),
            ATTR_FORECAST_NATIVE_WIND_SPEED: data_in.get(WINDSPEED),
        }

@@ -74,7 +74,7 @@
    },
    "get_events": {
      "name": "Get events",
      "description": "Retrieves events on a calendar within a time range.",
      "description": "Get events on a calendar within a time range.",
      "fields": {
        "start_date_time": {
          "name": "Start time",

@@ -2,10 +2,17 @@

from __future__ import annotations

from contextlib import suppress
import logging
from typing import TYPE_CHECKING, Literal, cast

from turbojpeg import TurboJPEG
with suppress(Exception):
    # TurboJPEG imports numpy which may or may not work so
    # we have to guard the import here. We still want
    # to import it at top level so it gets loaded
    # in the import executor and not in the event loop.
    from turbojpeg import TurboJPEG


if TYPE_CHECKING:
    from . import Image

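The img_util hunk above reverts a guarded top-level import to a plain one. The guarded form imports TurboJPEG while the module loads in the import executor, but tolerates any failure, since turbojpeg pulls in numpy, which can fail to import on some platforms. A minimal sketch of that pattern with an explicit None sentinel callers can check; the sentinel shape is an assumption for illustration:

# Guarded optional import: attempt at module load time, tolerate any
# failure, and leave a None sentinel when the dependency is unavailable
# (sentinel fallback assumed, for illustration).
from contextlib import suppress

TurboJPEG = None
with suppress(Exception):
    # turbojpeg imports numpy, which may fail to load on some platforms.
    from turbojpeg import TurboJPEG

# Callers can then branch on availability, e.g. `if TurboJPEG is not None:`.
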
@@ -8,18 +8,46 @@ from typing import Final

from canary.api import Api
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol

from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

from .const import CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS, DEFAULT_TIMEOUT
from .const import (
    CONF_FFMPEG_ARGUMENTS,
    DEFAULT_FFMPEG_ARGUMENTS,
    DEFAULT_TIMEOUT,
    DOMAIN,
)
from .coordinator import CanaryConfigEntry, CanaryDataUpdateCoordinator

_LOGGER: Final = logging.getLogger(__name__)

MIN_TIME_BETWEEN_UPDATES: Final = timedelta(seconds=30)

CONFIG_SCHEMA: Final = vol.Schema(
    vol.All(
        cv.deprecated(DOMAIN),
        {
            DOMAIN: vol.Schema(
                {
                    vol.Required(CONF_USERNAME): cv.string,
                    vol.Required(CONF_PASSWORD): cv.string,
                    vol.Optional(
                        CONF_TIMEOUT, default=DEFAULT_TIMEOUT
                    ): cv.positive_int,
                }
            )
        },
    ),
    extra=vol.ALLOW_EXTRA,
)

PLATFORMS: Final[list[Platform]] = [
    Platform.ALARM_CONTROL_PANEL,
    Platform.CAMERA,
@@ -27,6 +55,37 @@ PLATFORMS: Final[list[Platform]] = [
]


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Canary integration."""
    if hass.config_entries.async_entries(DOMAIN):
        return True

    ffmpeg_arguments = DEFAULT_FFMPEG_ARGUMENTS
    if CAMERA_DOMAIN in config:
        camera_config = next(
            (item for item in config[CAMERA_DOMAIN] if item["platform"] == DOMAIN),
            None,
        )

        if camera_config:
            ffmpeg_arguments = camera_config.get(
                CONF_FFMPEG_ARGUMENTS, DEFAULT_FFMPEG_ARGUMENTS
            )

    if DOMAIN in config:
        if ffmpeg_arguments != DEFAULT_FFMPEG_ARGUMENTS:
            config[DOMAIN][CONF_FFMPEG_ARGUMENTS] = ffmpeg_arguments

        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={"source": SOURCE_IMPORT},
                data=config[DOMAIN],
            )
        )
    return True


async def async_setup_entry(hass: HomeAssistant, entry: CanaryConfigEntry) -> bool:
    """Set up Canary from a config entry."""
    if not entry.options:

@@ -54,6 +54,10 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN):
        """Get the options flow for this handler."""
        return CanaryOptionsFlowHandler()

    async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
        """Handle a flow initiated by configuration file."""
        return await self.async_step_user(import_data)

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/chacon_dio",
  "iot_class": "cloud_push",
  "loggers": ["dio_chacon_api"],
  "requirements": ["dio-chacon-wifi-api==1.2.2"]
  "requirements": ["dio-chacon-wifi-api==1.2.1"]
}

@@ -28,10 +28,10 @@
        "name": "Thermostat",
        "state": {
          "off": "[%key:common::state::off%]",
          "auto": "[%key:common::state::auto%]",
          "heat": "Heat",
          "cool": "Cool",
          "heat_cool": "Heat/Cool",
          "auto": "Auto",
          "dry": "Dry",
          "fan_only": "Fan only"
        },
@@ -50,10 +50,10 @@
        "state": {
          "off": "[%key:common::state::off%]",
          "on": "[%key:common::state::on%]",
          "auto": "[%key:common::state::auto%]",
          "low": "[%key:common::state::low%]",
          "medium": "[%key:common::state::medium%]",
          "high": "[%key:common::state::high%]",
          "auto": "Auto",
          "low": "Low",
          "medium": "Medium",
          "high": "High",
          "top": "Top",
          "middle": "Middle",
          "focus": "Focus",
@@ -69,13 +69,13 @@
      "hvac_action": {
        "name": "Current action",
        "state": {
          "off": "[%key:common::state::off%]",
          "idle": "[%key:common::state::idle%]",
          "cooling": "Cooling",
          "defrosting": "Defrosting",
          "drying": "Drying",
          "fan": "Fan",
          "heating": "Heating",
          "idle": "[%key:common::state::idle%]",
          "off": "[%key:common::state::off%]",
          "preheating": "Preheating"
        }
      },
@@ -98,13 +98,13 @@
        "name": "Preset",
        "state": {
          "none": "None",
          "home": "[%key:common::state::home%]",
          "away": "[%key:common::state::not_home%]",
          "activity": "Activity",
          "eco": "Eco",
          "away": "Away",
          "boost": "Boost",
          "comfort": "Comfort",
          "eco": "Eco",
          "sleep": "Sleep"
          "home": "[%key:common::state::home%]",
          "sleep": "Sleep",
          "activity": "Activity"
        }
      },
      "preset_modes": {
@@ -257,8 +257,8 @@
  "selector": {
    "hvac_mode": {
      "options": {
        "off": "[%key:common::state::off%]",
        "auto": "[%key:common::state::auto%]",
        "off": "Off",
        "auto": "Auto",
        "cool": "Cool",
        "dry": "Dry",
        "fan_only": "Fan only",

@@ -127,11 +127,7 @@ class CloudOAuth2Implementation(config_entry_oauth2_flow.AbstractOAuth2Implement
                flow_id=flow_id, user_input=tokens
            )

        # It's a background task because it should be cancelled on shutdown and there's nothing else
        # we can do in such case. There's also no need to wait for this during setup.
        self.hass.async_create_background_task(
            await_tokens(), name="Awaiting OAuth tokens"
        )
        self.hass.async_create_task(await_tokens())

        return authorize_url


@@ -93,5 +93,3 @@ STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text"
TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech"

LOGIN_MFA_TIMEOUT = 60

VOICE_STYLE_SEPERATOR = "||"

@@ -18,7 +18,7 @@ from aiohttp import web
import attr
from hass_nabucasa import AlreadyConnectedError, Cloud, auth, thingtalk
from hass_nabucasa.const import STATE_DISCONNECTED
from hass_nabucasa.voice_data import TTS_VOICES
from hass_nabucasa.voice import TTS_VOICES
import voluptuous as vol

from homeassistant.components import websocket_api
@@ -57,7 +57,6 @@ from .const import (
    PREF_REMOTE_ALLOW_REMOTE_ENABLE,
    PREF_TTS_DEFAULT_VOICE,
    REQUEST_TIMEOUT,
    VOICE_STYLE_SEPERATOR,
)
from .google_config import CLOUD_GOOGLE
from .repairs import async_manage_legacy_subscription_issue
@@ -592,21 +591,10 @@ async def websocket_subscription(
def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]:
    """Validate language and voice."""
    language, voice = value
    style: str | None
    voice, _, style = voice.partition(VOICE_STYLE_SEPERATOR)
    if not style:
        style = None
    if language not in TTS_VOICES:
        raise vol.Invalid(f"Invalid language {language}")
    if voice not in (language_info := TTS_VOICES[language]):
    if voice not in TTS_VOICES[language]:
        raise vol.Invalid(f"Invalid voice {voice} for language {language}")
    voice_info = language_info[voice]
    if style and (
        isinstance(voice_info, str) or style not in voice_info.get("variants", [])
    ):
        raise vol.Invalid(
            f"Invalid style {style} for voice {voice} in language {language}"
        )
    return value


@@ -1024,24 +1012,13 @@ def tts_info(
    msg: dict[str, Any],
) -> None:
    """Fetch available tts info."""
    result = []
    for language, voices in TTS_VOICES.items():
        for voice_id, voice_info in voices.items():
            if isinstance(voice_info, str):
                result.append((language, voice_id, voice_info))
                continue

            name = voice_info["name"]
            result.append((language, voice_id, name))
            result.extend(
                [
                    (
                        language,
                        f"{voice_id}{VOICE_STYLE_SEPERATOR}{variant}",
                        f"{name} ({variant})",
                    )
                    for variant in voice_info.get("variants", [])
                ]
            )

    connection.send_result(msg["id"], {"languages": result})
    connection.send_result(
        msg["id"],
        {
            "languages": [
                (language, voice)
                for language, voices in TTS_VOICES.items()
                for voice in voices
            ]
        },
    )

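The http_api hunk above removes the "voice||style" handling: a style variant is encoded into the voice id with the VOICE_STYLE_SEPERATOR constant and split back out with str.partition. A minimal sketch of that round trip; the split_voice helper is illustrative, not part of the integration:

# Illustrative helper showing how "voice||style" ids are split; mirrors
# the str.partition() logic in validate_language_voice above.
VOICE_STYLE_SEPERATOR = "||"


def split_voice(voice_id: str) -> tuple[str, str | None]:
    """Return (voice, style); style is None when no separator is present."""
    voice, _, style = voice_id.partition(VOICE_STYLE_SEPERATOR)
    return voice, style or None


assert split_voice("JennyNeural||cheerful") == ("JennyNeural", "cheerful")
assert split_voice("JennyNeural") == ("JennyNeural", None)

str.partition always returns a three-tuple, so the no-separator case degrades cleanly to an empty style rather than raising.
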
@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
  "requirements": ["hass-nabucasa==0.96.0"],
  "requirements": ["hass-nabucasa==0.94.0"],
  "single_config_entry": true
}

@@ -1,110 +0,0 @@
"""Cloud onboarding views."""

from __future__ import annotations

from collections.abc import Callable, Coroutine
from functools import wraps
from typing import TYPE_CHECKING, Any, Concatenate

from aiohttp import web
from aiohttp.web_exceptions import HTTPUnauthorized

from homeassistant.components.http import KEY_HASS
from homeassistant.components.onboarding import (
    BaseOnboardingView,
    NoAuthBaseOnboardingView,
)
from homeassistant.core import HomeAssistant

from . import http_api as cloud_http
from .const import DATA_CLOUD

if TYPE_CHECKING:
    from homeassistant.components.onboarding import OnboardingStoreData


async def async_setup_views(hass: HomeAssistant, data: OnboardingStoreData) -> None:
    """Set up the cloud views."""

    hass.http.register_view(CloudForgotPasswordView(data))
    hass.http.register_view(CloudLoginView(data))
    hass.http.register_view(CloudLogoutView(data))
    hass.http.register_view(CloudStatusView(data))


def ensure_not_done[_ViewT: BaseOnboardingView, **_P](
    func: Callable[
        Concatenate[_ViewT, web.Request, _P],
        Coroutine[Any, Any, web.Response],
    ],
) -> Callable[Concatenate[_ViewT, web.Request, _P], Coroutine[Any, Any, web.Response]]:
    """Home Assistant API decorator to check onboarding and cloud."""

    @wraps(func)
    async def _ensure_not_done(
        self: _ViewT,
        request: web.Request,
        *args: _P.args,
        **kwargs: _P.kwargs,
    ) -> web.Response:
        """Check onboarding status, cloud and call function."""
        if self._data["done"]:
            # If at least one onboarding step is done, we don't allow accessing
            # the cloud onboarding views.
            raise HTTPUnauthorized

        return await func(self, request, *args, **kwargs)

    return _ensure_not_done


class CloudForgotPasswordView(
    NoAuthBaseOnboardingView, cloud_http.CloudForgotPasswordView
):
    """View to start Forgot Password flow."""

    url = "/api/onboarding/cloud/forgot_password"
    name = "api:onboarding:cloud:forgot_password"

    @ensure_not_done
    async def post(self, request: web.Request) -> web.Response:
        """Handle forgot password request."""
        return await super()._post(request)


class CloudLoginView(NoAuthBaseOnboardingView, cloud_http.CloudLoginView):
    """Login to Home Assistant Cloud."""

    url = "/api/onboarding/cloud/login"
    name = "api:onboarding:cloud:login"

    @ensure_not_done
    async def post(self, request: web.Request) -> web.Response:
        """Handle login request."""
        return await super()._post(request)


class CloudLogoutView(NoAuthBaseOnboardingView, cloud_http.CloudLogoutView):
    """Log out of the Home Assistant cloud."""

    url = "/api/onboarding/cloud/logout"
    name = "api:onboarding:cloud:logout"

    @ensure_not_done
    async def post(self, request: web.Request) -> web.Response:
        """Handle logout request."""
        return await super()._post(request)


class CloudStatusView(NoAuthBaseOnboardingView):
    """Get cloud status view."""

    url = "/api/onboarding/cloud/status"
    name = "api:onboarding:cloud:status"

    @ensure_not_done
    async def get(self, request: web.Request) -> web.Response:
        """Return cloud status."""
        hass = request.app[KEY_HASS]
        cloud = hass.data[DATA_CLOUD]
        return self.json({"logged_in": cloud.is_logged_in})
@@ -6,8 +6,7 @@ import logging
from typing import Any

from hass_nabucasa import Cloud
from hass_nabucasa.voice import MAP_VOICE, AudioOutput, Gender, VoiceError
from hass_nabucasa.voice_data import TTS_VOICES
from hass_nabucasa.voice import MAP_VOICE, TTS_VOICES, AudioOutput, Gender, VoiceError
import voluptuous as vol

from homeassistant.components.tts import (
@@ -31,13 +30,7 @@ from homeassistant.setup import async_when_setup

from .assist_pipeline import async_migrate_cloud_pipeline_engine
from .client import CloudClient
from .const import (
    DATA_CLOUD,
    DATA_PLATFORMS_SETUP,
    DOMAIN,
    TTS_ENTITY_UNIQUE_ID,
    VOICE_STYLE_SEPERATOR,
)
from .const import DATA_CLOUD, DATA_PLATFORMS_SETUP, DOMAIN, TTS_ENTITY_UNIQUE_ID
from .prefs import CloudPreferences

ATTR_GENDER = "gender"
@@ -64,7 +57,6 @@ DEFAULT_VOICES = {
    "ar-SY": "AmanyNeural",
    "ar-TN": "ReemNeural",
    "ar-YE": "MaryamNeural",
    "as-IN": "PriyomNeural",
    "az-AZ": "BabekNeural",
    "bg-BG": "KalinaNeural",
    "bn-BD": "NabanitaNeural",
@@ -134,8 +126,6 @@ DEFAULT_VOICES = {
    "id-ID": "GadisNeural",
    "is-IS": "GudrunNeural",
    "it-IT": "ElsaNeural",
    "iu-Cans-CA": "SiqiniqNeural",
    "iu-Latn-CA": "SiqiniqNeural",
    "ja-JP": "NanamiNeural",
    "jv-ID": "SitiNeural",
    "ka-GE": "EkaNeural",
@@ -157,8 +147,6 @@ DEFAULT_VOICES = {
    "ne-NP": "HemkalaNeural",
    "nl-BE": "DenaNeural",
    "nl-NL": "ColetteNeural",
    "or-IN": "SubhasiniNeural",
    "pa-IN": "OjasNeural",
    "pl-PL": "AgnieszkaNeural",
    "ps-AF": "LatifaNeural",
    "pt-BR": "FranciscaNeural",
@@ -170,7 +158,6 @@ DEFAULT_VOICES = {
    "sl-SI": "PetraNeural",
    "so-SO": "UbaxNeural",
    "sq-AL": "AnilaNeural",
    "sr-Latn-RS": "NicholasNeural",
    "sr-RS": "SophieNeural",
    "su-ID": "TutiNeural",
    "sv-SE": "SofieNeural",
@@ -190,9 +177,12 @@ DEFAULT_VOICES = {
    "vi-VN": "HoaiMyNeural",
    "wuu-CN": "XiaotongNeural",
    "yue-CN": "XiaoMinNeural",
    "zh-CN-henan": "YundengNeural",
    "zh-CN-shandong": "YunxiangNeural",
    "zh-CN": "XiaoxiaoNeural",
    "zh-CN-henan": "YundengNeural",
    "zh-CN-liaoning": "XiaobeiNeural",
    "zh-CN-shaanxi": "XiaoniNeural",
    "zh-CN-shandong": "YunxiangNeural",
    "zh-CN-sichuan": "YunxiNeural",
    "zh-HK": "HiuMaanNeural",
    "zh-TW": "HsiaoChenNeural",
    "zu-ZA": "ThandoNeural",
_LOGGER = logging.getLogger(__name__)


@callback
def _prepare_voice_args(
    *,
    hass: HomeAssistant,
    language: str,
    voice: str,
    gender: str | None,
) -> dict:
    """Prepare voice arguments."""
    gender = handle_deprecated_gender(hass, gender)
    style: str | None
    original_voice, _, style = voice.partition(VOICE_STYLE_SEPERATOR)
    if not style:
        style = None
    updated_voice = handle_deprecated_voice(hass, original_voice)
    if updated_voice not in TTS_VOICES[language]:
        default_voice = DEFAULT_VOICES[language]
        _LOGGER.debug(
            "Unsupported voice %s detected, falling back to default %s for %s",
            voice,
            default_voice,
            language,
        )
        updated_voice = default_voice

    return {
        "language": language,
        "voice": updated_voice,
        "gender": gender,
        "style": style,
    }


def _deprecated_platform(value: str) -> str:
    """Validate if platform is deprecated."""
    if value == DOMAIN:
@@ -371,61 +328,36 @@ class CloudTTSEntity(TextToSpeechEntity):
        """Return a list of supported voices for a language."""
        if not (voices := TTS_VOICES.get(language)):
            return None

        result = []

        for voice_id, voice_info in voices.items():
            if isinstance(voice_info, str):
                result.append(
                    Voice(
                        voice_id,
                        voice_info,
                    )
                )
                continue

            name = voice_info["name"]

            result.append(
                Voice(
                    voice_id,
                    name,
                )
            )
            result.extend(
                [
                    Voice(
                        f"{voice_id}{VOICE_STYLE_SEPERATOR}{variant}",
                        f"{name} ({variant})",
                    )
                    for variant in voice_info.get("variants", [])
                ]
            )

        return result
        return [Voice(voice, voice) for voice in voices]

    async def async_get_tts_audio(
        self, message: str, language: str, options: dict[str, Any]
    ) -> TtsAudioType:
        """Load TTS from Home Assistant Cloud."""
        gender: Gender | str | None = options.get(ATTR_GENDER)
        gender = handle_deprecated_gender(self.hass, gender)
        original_voice: str = options.get(
            ATTR_VOICE,
            self._voice if language == self._language else DEFAULT_VOICES[language],
        )
        voice = handle_deprecated_voice(self.hass, original_voice)
        if voice not in TTS_VOICES[language]:
            default_voice = DEFAULT_VOICES[language]
            _LOGGER.debug(
                "Unsupported voice %s detected, falling back to default %s for %s",
                voice,
                default_voice,
                language,
            )
            voice = default_voice
        # Process TTS
        try:
            data = await self.cloud.voice.process_tts(
                text=message,
                language=language,
                gender=gender,
                voice=voice,
                output=options[ATTR_AUDIO_OUTPUT],
                **_prepare_voice_args(
                    hass=self.hass,
                    language=language,
                    voice=options.get(
                        ATTR_VOICE,
                        (
                            self._voice
                            if language == self._language
                            else DEFAULT_VOICES[language]
                        ),
                    ),
                    gender=options.get(ATTR_GENDER),
                ),
            )
        except VoiceError as err:
            _LOGGER.error("Voice error: %s", err)
@@ -437,8 +369,6 @@ class CloudTTSEntity(TextToSpeechEntity):
class CloudProvider(Provider):
    """Home Assistant Cloud speech API provider."""

    has_entity = True

    def __init__(self, cloud: Cloud[CloudClient]) -> None:
        """Initialize cloud provider."""
        self.cloud = cloud
@@ -471,38 +401,7 @@ class CloudProvider(Provider):
        """Return a list of supported voices for a language."""
        if not (voices := TTS_VOICES.get(language)):
            return None

        result = []

        for voice_id, voice_info in voices.items():
            if isinstance(voice_info, str):
                result.append(
                    Voice(
                        voice_id,
                        voice_info,
                    )
                )
                continue

            name = voice_info["name"]

            result.append(
                Voice(
                    voice_id,
                    name,
                )
            )
            result.extend(
                [
                    Voice(
                        f"{voice_id}{VOICE_STYLE_SEPERATOR}{variant}",
                        f"{name} ({variant})",
                    )
                    for variant in voice_info.get("variants", [])
                ]
            )

        return result
        return [Voice(voice, voice) for voice in voices]

    @property
    def default_options(self) -> dict[str, str]:
@@ -516,22 +415,30 @@ class CloudProvider(Provider):
    ) -> TtsAudioType:
        """Load TTS from Home Assistant Cloud."""
        assert self.hass is not None
        gender: Gender | str | None = options.get(ATTR_GENDER)
        gender = handle_deprecated_gender(self.hass, gender)
        original_voice: str = options.get(
            ATTR_VOICE,
            self._voice if language == self._language else DEFAULT_VOICES[language],
        )
        voice = handle_deprecated_voice(self.hass, original_voice)
        if voice not in TTS_VOICES[language]:
            default_voice = DEFAULT_VOICES[language]
            _LOGGER.debug(
                "Unsupported voice %s detected, falling back to default %s for %s",
                voice,
                default_voice,
                language,
            )
            voice = default_voice
        # Process TTS
        try:
            data = await self.cloud.voice.process_tts(
                text=message,
                language=language,
                gender=gender,
                voice=voice,
                output=options[ATTR_AUDIO_OUTPUT],
                **_prepare_voice_args(
                    hass=self.hass,
                    language=language,
                    voice=options.get(
                        ATTR_VOICE,
                        self._voice
                        if language == self._language
                        else DEFAULT_VOICES[language],
                    ),
                    gender=options.get(ATTR_GENDER),
                ),
            )
        except VoiceError as err:
            _LOGGER.error("Voice error: %s", err)

@@ -12,7 +12,6 @@ from .coordinator import (
    ComelitSerialBridge,
    ComelitVedoSystem,
)
from .utils import async_client_session

BRIDGE_PLATFORMS = [
    Platform.CLIMATE,
@@ -33,9 +32,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> b
    """Set up Comelit platform."""

    coordinator: ComelitBaseCoordinator

    session = await async_client_session(hass)

    if entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE:
        coordinator = ComelitSerialBridge(
            hass,
@@ -43,7 +39,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> b
            entry.data[CONF_HOST],
            entry.data.get(CONF_PORT, DEFAULT_PORT),
            entry.data[CONF_PIN],
            session,
        )
        platforms = BRIDGE_PLATFORMS
    else:
@@ -53,7 +48,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> b
            entry.data[CONF_HOST],
            entry.data.get(CONF_PORT, DEFAULT_PORT),
            entry.data[CONF_PIN],
            session,
        )
        platforms = VEDO_PLATFORMS

Some files were not shown because too many files have changed in this diff.