forked from home-assistant/core

Compare commits: 2025.5.1 ... remove_bac (1 commit)

Commit 01bfc09072

.github/workflows/builder.yml (vendored, 16 changes)
@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -116,7 +116,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -175,7 +175,7 @@ jobs:
          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: translations

@@ -457,12 +457,12 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: translations

@@ -509,7 +509,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build Docker image
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile

@@ -522,7 +522,7 @@ jobs:
       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
         id: push
-        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile

@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
+        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (vendored, 40 changes)

@@ -249,7 +249,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -294,7 +294,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -334,7 +334,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -374,7 +374,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -484,7 +484,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true

@@ -587,7 +587,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -620,7 +620,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -677,7 +677,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true

@@ -720,7 +720,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -767,7 +767,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -812,7 +812,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -889,7 +889,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -949,7 +949,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true

@@ -968,7 +968,7 @@ jobs:
        run: |
          echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: pytest_buckets
       - name: Compile English translations

@@ -1074,7 +1074,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true

@@ -1208,7 +1208,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true

@@ -1312,7 +1312,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov

@@ -1359,7 +1359,7 @@ jobs:
         uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ matrix.python-version }}
           check-latest: true

@@ -1454,7 +1454,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov

@@ -1479,7 +1479,7 @@ jobs:
     timeout-minutes: 10
     steps:
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           pattern: test-results-*
       - name: Upload test results to Codecov
.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.16
+        uses: github/codeql-action/init@v3.28.15
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.16
+        uses: github/codeql-action/analyze@v3.28.15
         with:
           category: "/language:python"
.github/workflows/translations.yml (vendored, 2 changes)

@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (vendored, 16 changes)

@@ -36,7 +36,7 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.5.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_diff

@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.3.0
+        uses: actions/download-artifact@v4.2.1
         with:
           name: requirements_all_wheels
@@ -386,7 +386,6 @@ homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
 homeassistant.components.peblar.*
 homeassistant.components.peco.*
 homeassistant.components.pegel_online.*
 homeassistant.components.persistent_notification.*
 homeassistant.components.person.*
 homeassistant.components.pi_hole.*

@@ -463,7 +462,6 @@ homeassistant.components.slack.*
 homeassistant.components.sleepiq.*
 homeassistant.components.smhi.*
 homeassistant.components.smlight.*
 homeassistant.components.smtp.*
 homeassistant.components.snooz.*
 homeassistant.components.solarlog.*
 homeassistant.components.sonarr.*
CODEOWNERS (generated, 17 changes)

@@ -171,8 +171,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/avea/ @pattyland
 /homeassistant/components/awair/ @ahayworth @danielsjf
 /tests/components/awair/ @ahayworth @danielsjf
-/homeassistant/components/aws_s3/ @tomasbedrich
-/tests/components/aws_s3/ @tomasbedrich
 /homeassistant/components/axis/ @Kane610
 /tests/components/axis/ @Kane610
 /homeassistant/components/azure_data_explorer/ @kaareseras

@@ -1083,6 +1081,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/ombi/ @larssont
 /homeassistant/components/onboarding/ @home-assistant/core
 /tests/components/onboarding/ @home-assistant/core
+/homeassistant/components/oncue/ @bdraco @peterager
+/tests/components/oncue/ @bdraco @peterager
 /homeassistant/components/ondilo_ico/ @JeromeHXP
 /tests/components/ondilo_ico/ @JeromeHXP
 /homeassistant/components/onedrive/ @zweckj

@@ -1260,8 +1260,6 @@ build.json @home-assistant/supervisor
 /tests/components/recovery_mode/ @home-assistant/core
 /homeassistant/components/refoss/ @ashionky
 /tests/components/refoss/ @ashionky
-/homeassistant/components/rehlko/ @bdraco @peterager
-/tests/components/rehlko/ @bdraco @peterager
 /homeassistant/components/remote/ @home-assistant/core
 /tests/components/remote/ @home-assistant/core
 /homeassistant/components/remote_calendar/ @Thomas55555

@@ -1441,8 +1439,8 @@ build.json @home-assistant/supervisor
 /tests/components/solarlog/ @Ernst79 @dontinelli
 /homeassistant/components/solax/ @squishykid @Darsstar
 /tests/components/solax/ @squishykid @Darsstar
-/homeassistant/components/soma/ @ratsept
-/tests/components/soma/ @ratsept
+/homeassistant/components/soma/ @ratsept @sebfortier2288
+/tests/components/soma/ @ratsept @sebfortier2288
 /homeassistant/components/sonarr/ @ctalkington
 /tests/components/sonarr/ @ctalkington
 /homeassistant/components/songpal/ @rytilahti @shenxn

@@ -1474,8 +1472,7 @@ build.json @home-assistant/supervisor
 /tests/components/steam_online/ @tkdrob
 /homeassistant/components/steamist/ @bdraco
 /tests/components/steamist/ @bdraco
-/homeassistant/components/stiebel_eltron/ @fucm @ThyMYthOS
-/tests/components/stiebel_eltron/ @fucm @ThyMYthOS
+/homeassistant/components/stiebel_eltron/ @fucm
 /homeassistant/components/stookwijzer/ @fwestenberg
 /tests/components/stookwijzer/ @fwestenberg
 /homeassistant/components/stream/ @hunterjm @uvjustin @allenporter

@@ -1678,8 +1675,8 @@ build.json @home-assistant/supervisor
 /tests/components/vlc_telnet/ @rodripf @MartinHjelmare
 /homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
 /tests/components/vodafone_station/ @paoloantinori @chemelli74
-/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
-/tests/components/voip/ @balloob @synesthesiam @jaminh
+/homeassistant/components/voip/ @balloob @synesthesiam
+/tests/components/voip/ @balloob @synesthesiam
 /homeassistant/components/volumio/ @OnFreund
 /tests/components/volumio/ @OnFreund
 /homeassistant/components/volvooncall/ @molobrakos
Dockerfile (generated, 2 changes)

@@ -31,7 +31,7 @@ RUN \
     && go2rtc --version

 # Install uv
-RUN pip3 install uv==0.7.1
+RUN pip3 install uv==0.6.10

 WORKDIR /usr/src
build.yaml (10 changes)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.05.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.05.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.05.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.05.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.05.0
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.02.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.02.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.02.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.02.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.02.1
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -75,7 +75,6 @@ from .core_config import async_process_ha_core_config
 from .exceptions import HomeAssistantError
 from .helpers import (
     area_registry,
-    backup,
     category_registry,
     config_validation as cv,
     device_registry,

@@ -881,10 +880,6 @@ async def _async_set_up_integrations(
     if "recorder" in all_domains:
         recorder.async_initialize_recorder(hass)

-    # Initialize backup
-    if "backup" in all_domains:
-        backup.async_initialize_backup(hass)
-
     stages: list[tuple[str, set[str], int | None]] = [
         *(
             (name, domain_group, timeout)
@@ -1,12 +1,5 @@
 {
   "domain": "amazon",
   "name": "Amazon",
-  "integrations": [
-    "alexa",
-    "amazon_polly",
-    "aws",
-    "aws_s3",
-    "fire_tv",
-    "route53"
-  ]
+  "integrations": ["alexa", "amazon_polly", "aws", "fire_tv", "route53"]
 }
@@ -6,7 +6,6 @@
     "google_assistant_sdk",
     "google_cloud",
     "google_drive",
     "google_gemini",
     "google_generative_ai_conversation",
     "google_mail",
     "google_maps",
@@ -1,6 +0,0 @@
-{
-  "domain": "nuki",
-  "name": "Nuki",
-  "integrations": ["nuki"],
-  "iot_standards": ["matter"]
-}
@@ -67,7 +67,6 @@ POLLEN_CATEGORY_MAP = {
     2: "moderate",
     3: "high",
     4: "very_high",
-    5: "extreme",
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
@@ -72,7 +72,6 @@
         "level": {
           "name": "Level",
           "state": {
-            "extreme": "Extreme",
             "high": "[%key:common::state::high%]",
             "low": "[%key:common::state::low%]",
             "moderate": "Moderate",

@@ -90,7 +89,6 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
             "high": "[%key:common::state::high%]",
             "low": "[%key:common::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",

@@ -125,7 +123,6 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
             "high": "[%key:common::state::high%]",
             "low": "[%key:common::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",

@@ -170,7 +167,6 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
             "high": "[%key:common::state::high%]",
             "low": "[%key:common::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",

@@ -185,7 +181,6 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
             "high": "[%key:common::state::high%]",
             "low": "[%key:common::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",

@@ -200,7 +195,6 @@
         "level": {
           "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
           "state": {
-            "extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
             "high": "[%key:common::state::high%]",
             "low": "[%key:common::state::low%]",
             "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
@@ -2,38 +2,25 @@

 from __future__ import annotations

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant

 from .const import CONNECTION_TYPE, LOCAL
-from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator

 PLATFORMS = [Platform.CLIMATE]


-async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Adax from a config entry."""
-    if entry.data.get(CONNECTION_TYPE) == LOCAL:
-        local_coordinator = AdaxLocalCoordinator(hass, entry)
-        entry.runtime_data = local_coordinator
-    else:
-        cloud_coordinator = AdaxCloudCoordinator(hass, entry)
-        entry.runtime_data = cloud_coordinator
-
-    await entry.runtime_data.async_config_entry_first_refresh()
-
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
     return True


-async def async_unload_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


-async def async_migrate_entry(
-    hass: HomeAssistant, config_entry: AdaxConfigEntry
-) -> bool:
+async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
     """Migrate old entry."""
     # convert title and unique_id to string
     if config_entry.version == 1:
@@ -12,42 +12,57 @@ from homeassistant.components.climate import (
     ClimateEntityFeature,
     HVACMode,
 )
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_TEMPERATURE,
+    CONF_IP_ADDRESS,
+    CONF_PASSWORD,
+    CONF_TOKEN,
     CONF_UNIQUE_ID,
     PRECISION_WHOLE,
     UnitOfTemperature,
 )
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from . import AdaxConfigEntry
-from .const import CONNECTION_TYPE, DOMAIN, LOCAL
-from .coordinator import AdaxCloudCoordinator, AdaxLocalCoordinator
+from .const import ACCOUNT_ID, CONNECTION_TYPE, DOMAIN, LOCAL


 async def async_setup_entry(
     hass: HomeAssistant,
-    entry: AdaxConfigEntry,
+    entry: ConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up the Adax thermostat with config flow."""
     if entry.data.get(CONNECTION_TYPE) == LOCAL:
-        local_coordinator = cast(AdaxLocalCoordinator, entry.runtime_data)
-        async_add_entities(
-            [LocalAdaxDevice(local_coordinator, entry.data[CONF_UNIQUE_ID])],
+        adax_data_handler = AdaxLocal(
+            entry.data[CONF_IP_ADDRESS],
+            entry.data[CONF_TOKEN],
+            websession=async_get_clientsession(hass, verify_ssl=False),
         )
-    else:
-        cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)
         async_add_entities(
-            AdaxDevice(cloud_coordinator, device_id)
-            for device_id in cloud_coordinator.data
+            [LocalAdaxDevice(adax_data_handler, entry.data[CONF_UNIQUE_ID])], True
         )
+        return
+
+    adax_data_handler = Adax(
+        entry.data[ACCOUNT_ID],
+        entry.data[CONF_PASSWORD],
+        websession=async_get_clientsession(hass),
+    )
+
+    async_add_entities(
+        (
+            AdaxDevice(room, adax_data_handler)
+            for room in await adax_data_handler.get_rooms()
+        ),
+        True,
+    )


-class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
+class AdaxDevice(ClimateEntity):
     """Representation of a heater."""

     _attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]

@@ -61,37 +76,20 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
     _attr_target_temperature_step = PRECISION_WHOLE
     _attr_temperature_unit = UnitOfTemperature.CELSIUS

-    def __init__(
-        self,
-        coordinator: AdaxCloudCoordinator,
-        device_id: str,
-    ) -> None:
+    def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None:
         """Initialize the heater."""
-        super().__init__(coordinator)
-        self._adax_data_handler: Adax = coordinator.adax_data_handler
-        self._device_id = device_id
+        self._device_id = heater_data["id"]
+        self._adax_data_handler = adax_data_handler

-        self._attr_name = self.room["name"]
-        self._attr_unique_id = f"{self.room['homeId']}_{self._device_id}"
+        self._attr_unique_id = f"{heater_data['homeId']}_{heater_data['id']}"
         self._attr_device_info = DeviceInfo(
-            identifiers={(DOMAIN, device_id)},
+            identifiers={(DOMAIN, heater_data["id"])},
             # Instead of setting the device name to the entity name, adax
             # should be updated to set has_entity_name = True, and set the entity
             # name to None
             name=cast(str | None, self.name),
             manufacturer="Adax",
         )
-        self._apply_data(self.room)
-
-    @property
-    def available(self) -> bool:
-        """Whether the entity is available or not."""
-        return super().available and self._device_id in self.coordinator.data
-
-    @property
-    def room(self) -> dict[str, Any]:
-        """Gets the data for this particular device."""
-        return self.coordinator.data[self._device_id]

     async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
         """Set hvac mode."""

@@ -106,9 +104,7 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
             )
         else:
             return

-        # Request data refresh from source to verify that update was successful
-        await self.coordinator.async_request_refresh()
+        await self._adax_data_handler.update()

     async def async_set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperature."""

@@ -118,31 +114,28 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
             self._device_id, temperature, True
         )

-    @callback
-    def _handle_coordinator_update(self) -> None:
-        """Handle updated data from the coordinator."""
-        if room := self.room:
-            self._apply_data(room)
-        super()._handle_coordinator_update()
-
-    def _apply_data(self, room: dict[str, Any]) -> None:
-        """Update the appropriate attributues based on received data."""
-        self._attr_current_temperature = room.get("temperature")
-        self._attr_target_temperature = room.get("targetTemperature")
-        if room["heatingEnabled"]:
-            self._attr_hvac_mode = HVACMode.HEAT
-            self._attr_icon = "mdi:radiator"
-        else:
-            self._attr_hvac_mode = HVACMode.OFF
-            self._attr_icon = "mdi:radiator-off"
+    async def async_update(self) -> None:
+        """Get the latest data."""
+        for room in await self._adax_data_handler.get_rooms():
+            if room["id"] != self._device_id:
+                continue
+            self._attr_name = room["name"]
+            self._attr_current_temperature = room.get("temperature")
+            self._attr_target_temperature = room.get("targetTemperature")
+            if room["heatingEnabled"]:
+                self._attr_hvac_mode = HVACMode.HEAT
+                self._attr_icon = "mdi:radiator"
+            else:
+                self._attr_hvac_mode = HVACMode.OFF
+                self._attr_icon = "mdi:radiator-off"
+            return


-class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
+class LocalAdaxDevice(ClimateEntity):
     """Representation of a heater."""

     _attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
-    _attr_hvac_mode = HVACMode.OFF
-    _attr_icon = "mdi:radiator-off"
+    _attr_hvac_mode = HVACMode.HEAT
     _attr_max_temp = 35
     _attr_min_temp = 5
     _attr_supported_features = (

@@ -153,10 +146,9 @@ class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
     _attr_target_temperature_step = PRECISION_WHOLE
     _attr_temperature_unit = UnitOfTemperature.CELSIUS

-    def __init__(self, coordinator: AdaxLocalCoordinator, unique_id: str) -> None:
+    def __init__(self, adax_data_handler: AdaxLocal, unique_id: str) -> None:
         """Initialize the heater."""
-        super().__init__(coordinator)
-        self._adax_data_handler: AdaxLocal = coordinator.adax_data_handler
+        self._adax_data_handler = adax_data_handler
         self._attr_unique_id = unique_id
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, unique_id)},

@@ -177,20 +169,17 @@ class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
             return
         await self._adax_data_handler.set_target_temperature(temperature)

-    @callback
-    def _handle_coordinator_update(self) -> None:
-        """Handle updated data from the coordinator."""
-        if data := self.coordinator.data:
-            self._attr_current_temperature = data["current_temperature"]
-            self._attr_available = self._attr_current_temperature is not None
-            if (target_temp := data["target_temperature"]) == 0:
-                self._attr_hvac_mode = HVACMode.OFF
-                self._attr_icon = "mdi:radiator-off"
-                if target_temp == 0:
-                    self._attr_target_temperature = self._attr_min_temp
-            else:
-                self._attr_hvac_mode = HVACMode.HEAT
-                self._attr_icon = "mdi:radiator"
-                self._attr_target_temperature = target_temp
-
-        super()._handle_coordinator_update()
+    async def async_update(self) -> None:
+        """Get the latest data."""
+        data = await self._adax_data_handler.get_status()
+        self._attr_current_temperature = data["current_temperature"]
+        self._attr_available = self._attr_current_temperature is not None
+        if (target_temp := data["target_temperature"]) == 0:
+            self._attr_hvac_mode = HVACMode.OFF
+            self._attr_icon = "mdi:radiator-off"
+            if target_temp == 0:
+                self._attr_target_temperature = self._attr_min_temp
+        else:
+            self._attr_hvac_mode = HVACMode.HEAT
+            self._attr_icon = "mdi:radiator"
+            self._attr_target_temperature = target_temp
@@ -1,6 +1,5 @@
 """Constants for the Adax integration."""

-import datetime
 from typing import Final

 ACCOUNT_ID: Final = "account_id"

@@ -10,5 +9,3 @@ DOMAIN: Final = "adax"
 LOCAL = "Local"
 WIFI_SSID = "wifi_ssid"
 WIFI_PSWD = "wifi_pswd"
-
-SCAN_INTERVAL = datetime.timedelta(seconds=60)
@@ -1,71 +0,0 @@
-"""DataUpdateCoordinator for the Adax component."""
-
-import logging
-from typing import Any, cast
-
-from adax import Adax
-from adax_local import Adax as AdaxLocal
-
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_TOKEN
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
-
-from .const import ACCOUNT_ID, SCAN_INTERVAL
-
-_LOGGER = logging.getLogger(__name__)
-
-
-type AdaxConfigEntry = ConfigEntry[AdaxCloudCoordinator | AdaxLocalCoordinator]
-
-
-class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
-    """Coordinator for updating data to and from Adax (cloud)."""
-
-    def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
-        """Initialize the Adax coordinator used for Cloud mode."""
-        super().__init__(
-            hass,
-            config_entry=entry,
-            logger=_LOGGER,
-            name="AdaxCloud",
-            update_interval=SCAN_INTERVAL,
-        )
-
-        self.adax_data_handler = Adax(
-            entry.data[ACCOUNT_ID],
-            entry.data[CONF_PASSWORD],
-            websession=async_get_clientsession(hass),
-        )
-
-    async def _async_update_data(self) -> dict[str, dict[str, Any]]:
-        """Fetch data from the Adax."""
-        rooms = await self.adax_data_handler.get_rooms() or []
-        return {r["id"]: r for r in rooms}
-
-
-class AdaxLocalCoordinator(DataUpdateCoordinator[dict[str, Any] | None]):
-    """Coordinator for updating data to and from Adax (local)."""
-
-    def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
-        """Initialize the Adax coordinator used for Local mode."""
-        super().__init__(
-            hass,
-            config_entry=entry,
-            logger=_LOGGER,
-            name="AdaxLocal",
-            update_interval=SCAN_INTERVAL,
-        )
-
-        self.adax_data_handler = AdaxLocal(
-            entry.data[CONF_IP_ADDRESS],
-            entry.data[CONF_TOKEN],
-            websession=async_get_clientsession(hass, verify_ssl=False),
-        )
-
-    async def _async_update_data(self) -> dict[str, Any]:
-        """Fetch data from the Adax."""
-        if result := await self.adax_data_handler.get_status():
-            return cast(dict[str, Any], result)
-        raise UpdateFailed("Got invalid status from device")
@@ -719,7 +719,7 @@ class LockCapabilities(AlexaEntity):
         yield Alexa(self.entity)


-@ENTITY_ADAPTERS.register(media_player.DOMAIN)
+@ENTITY_ADAPTERS.register(media_player.const.DOMAIN)
 class MediaPlayerCapabilities(AlexaEntity):
     """Class to represent MediaPlayer capabilities."""

@@ -757,7 +757,9 @@

         if supported & media_player.MediaPlayerEntityFeature.SELECT_SOURCE:
             inputs = AlexaInputController.get_valid_inputs(
-                self.entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST, [])
+                self.entity.attributes.get(
+                    media_player.const.ATTR_INPUT_SOURCE_LIST, []
+                )
             )
             if len(inputs) > 0:
                 yield AlexaInputController(self.entity)

@@ -774,7 +776,8 @@
             and domain != "denonavr"
         ):
             inputs = AlexaEqualizerController.get_valid_inputs(
-                self.entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST) or []
+                self.entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
+                or []
             )
             if len(inputs) > 0:
                 yield AlexaEqualizerController(self.entity)

@@ -566,7 +566,7 @@ async def async_api_set_volume(
     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
+        media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
     }

     await hass.services.async_call(

@@ -589,7 +589,7 @@ async def async_api_select_input(
     # Attempt to map the ALL UPPERCASE payload name to a source.
     # Strips trailing 1 to match single input devices.
-    source_list = entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or []
+    source_list = entity.attributes.get(media_player.const.ATTR_INPUT_SOURCE_LIST) or []
     for source in source_list:
         formatted_source = (
             source.lower().replace("-", "").replace("_", "").replace(" ", "")

@@ -611,7 +611,7 @@ async def async_api_select_input(
     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_INPUT_SOURCE: media_input,
+        media_player.const.ATTR_INPUT_SOURCE: media_input,
     }

     await hass.services.async_call(

@@ -636,7 +636,7 @@ async def async_api_adjust_volume(
     volume_delta = int(directive.payload["volume"])

     entity = directive.entity
-    current_level = entity.attributes[media_player.ATTR_MEDIA_VOLUME_LEVEL]
+    current_level = entity.attributes[media_player.const.ATTR_MEDIA_VOLUME_LEVEL]

     # read current state
     try:

@@ -648,7 +648,7 @@
     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
+        media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
     }

     await hass.services.async_call(

@@ -709,7 +709,7 @@ async def async_api_set_mute(
     entity = directive.entity
     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_MEDIA_VOLUME_MUTED: mute,
+        media_player.const.ATTR_MEDIA_VOLUME_MUTED: mute,
     }

     await hass.services.async_call(

@@ -1708,13 +1708,15 @@ async def async_api_changechannel(
     data: dict[str, Any] = {
         ATTR_ENTITY_ID: entity.entity_id,
-        media_player.ATTR_MEDIA_CONTENT_ID: channel,
-        media_player.ATTR_MEDIA_CONTENT_TYPE: (media_player.MediaType.CHANNEL),
+        media_player.const.ATTR_MEDIA_CONTENT_ID: channel,
+        media_player.const.ATTR_MEDIA_CONTENT_TYPE: (
+            media_player.const.MEDIA_TYPE_CHANNEL
+        ),
     }

     await hass.services.async_call(
         entity.domain,
-        media_player.SERVICE_PLAY_MEDIA,
+        media_player.const.SERVICE_PLAY_MEDIA,
         data,
         blocking=False,
         context=context,

@@ -1823,13 +1825,13 @@ async def async_api_set_eq_mode(
     context: ha.Context,
 ) -> AlexaResponse:
     """Process a SetMode request for EqualizerController."""
-    mode: str = directive.payload["mode"]
+    mode = directive.payload["mode"]
     entity = directive.entity
     data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}

-    sound_mode_list = entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST)
+    sound_mode_list = entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
     if sound_mode_list and mode.lower() in sound_mode_list:
-        data[media_player.ATTR_SOUND_MODE] = mode.lower()
+        data[media_player.const.ATTR_SOUND_MODE] = mode.lower()
     else:
         msg = f"failed to map sound mode {mode} to a mode on {entity.entity_id}"
         raise AlexaInvalidValueError(msg)
@@ -9,13 +9,11 @@ from anthropic import AsyncStream
 from anthropic._types import NOT_GIVEN
 from anthropic.types import (
     InputJSONDelta,
-    MessageDeltaUsage,
     MessageParam,
     MessageStreamEvent,
     RawContentBlockDeltaEvent,
     RawContentBlockStartEvent,
     RawContentBlockStopEvent,
-    RawMessageDeltaEvent,
     RawMessageStartEvent,
     RawMessageStopEvent,
     RedactedThinkingBlock,

@@ -33,7 +31,6 @@ from anthropic.types import (
     ToolResultBlockParam,
     ToolUseBlock,
     ToolUseBlockParam,
-    Usage,
 )
 from voluptuous_openapi import convert

@@ -165,8 +162,7 @@ def _convert_content(
     return messages


-async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
-    chat_log: conversation.ChatLog,
+async def _transform_stream(
     result: AsyncStream[MessageStreamEvent],
     messages: list[MessageParam],
 ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:

@@ -211,7 +207,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
         | None
     ) = None
     current_tool_args: str
-    input_usage: Usage | None = None

     async for response in result:
         LOGGER.debug("Received response: %s", response)

@@ -220,7 +215,6 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
             if response.message.role != "assistant":
                 raise ValueError("Unexpected message role")
             current_message = MessageParam(role=response.message.role, content=[])
-            input_usage = response.message.usage
         elif isinstance(response, RawContentBlockStartEvent):
             if isinstance(response.content_block, ToolUseBlock):
                 current_block = ToolUseBlockParam(

@@ -291,34 +285,12 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                 raise ValueError("Unexpected stop event without a current message")
             current_message["content"].append(current_block)  # type: ignore[union-attr]
             current_block = None
-        elif isinstance(response, RawMessageDeltaEvent):
-            if (usage := response.usage) is not None:
-                chat_log.async_trace(_create_token_stats(input_usage, usage))
         elif isinstance(response, RawMessageStopEvent):
             if current_message is not None:
                 messages.append(current_message)
                 current_message = None


-def _create_token_stats(
-    input_usage: Usage | None, response_usage: MessageDeltaUsage
-) -> dict[str, Any]:
-    """Create token stats for conversation agent tracing."""
-    input_tokens = 0
-    cached_input_tokens = 0
-    if input_usage:
-        input_tokens = input_usage.input_tokens
-        cached_input_tokens = input_usage.cache_creation_input_tokens or 0
-    output_tokens = response_usage.output_tokens
-    return {
-        "stats": {
-            "input_tokens": input_tokens,
-            "cached_input_tokens": cached_input_tokens,
-            "output_tokens": output_tokens,
-        }
-    }


 class AnthropicConversationEntity(
     conversation.ConversationEntity, conversation.AbstractConversationAgent
 ):

@@ -421,8 +393,7 @@ class AnthropicConversationEntity(
                 [
                     content
                     async for content in chat_log.async_add_delta_content_stream(
-                        user_input.agent_id,
-                        _transform_stream(chat_log, stream, messages),
+                        user_input.agent_id, _transform_stream(stream, messages)
                     )
                     if not isinstance(content, conversation.AssistantContent)
                 ]
@@ -113,7 +113,4 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
             data = await aioapcaccess.request_status(self._host, self._port)
             return APCUPSdData(data)
         except (OSError, asyncio.IncompleteReadError) as error:
-            raise UpdateFailed(
-                translation_domain=DOMAIN,
-                translation_key="cannot_connect",
-            ) from error
+            raise UpdateFailed(error) from error
@@ -93,7 +93,7 @@
           "name": "Internal temperature"
         },
         "last_self_test": {
-          "name": "Last self-test"
+          "name": "Last self test"
         },
         "last_transfer": {
           "name": "Last transfer"

@@ -177,7 +177,7 @@
           "name": "Restore requirement"
         },
         "self_test_result": {
-          "name": "Self-test result"
+          "name": "Self test result"
         },
         "sensitivity": {
           "name": "Sensitivity"

@@ -195,7 +195,7 @@
           "name": "Status"
         },
         "self_test_interval": {
-          "name": "Self-test interval"
+          "name": "Self test interval"
         },
         "time_left": {
           "name": "Time left"

@@ -219,10 +219,5 @@
           "name": "Transfer to battery"
         }
       }
     }
-  },
-  "exceptions": {
-    "cannot_connect": {
-      "message": "Cannot connect to APC UPS Daemon."
-    }
+  }
 }
@@ -6,6 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/apsystems",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "loggers": ["APsystemsEZ1"],
-  "requirements": ["apsystems-ez1==2.6.0"]
+  "requirements": ["apsystems-ez1==2.5.0"]
 }
@@ -18,7 +18,6 @@ from homeassistant.const import (
     ATTR_ENTITY_ID,
     ATTR_MODE,
     ATTR_NAME,
-    CONF_ACTIONS,
     CONF_ALIAS,
     CONF_CONDITIONS,
     CONF_DEVICE_ID,

@@ -28,7 +27,6 @@ from homeassistant.const import (
     CONF_MODE,
     CONF_PATH,
     CONF_PLATFORM,
-    CONF_TRIGGERS,
     CONF_VARIABLES,
     CONF_ZONE,
     EVENT_HOMEASSISTANT_STARTED,

@@ -88,9 +86,11 @@ from homeassistant.util.hass_dict import HassKey

 from .config import AutomationConfig, ValidationStatus
 from .const import (
+    CONF_ACTIONS,
     CONF_INITIAL_STATE,
     CONF_TRACE,
     CONF_TRIGGER_VARIABLES,
+    CONF_TRIGGERS,
     DEFAULT_INITIAL_STATE,
     DOMAIN,
     LOGGER,
@@ -14,15 +14,11 @@ from homeassistant.components import blueprint
 from homeassistant.components.trace import TRACE_CONFIG_SCHEMA
 from homeassistant.config import config_per_platform, config_without_domain
 from homeassistant.const import (
-    CONF_ACTION,
-    CONF_ACTIONS,
     CONF_ALIAS,
     CONF_CONDITION,
     CONF_CONDITIONS,
     CONF_DESCRIPTION,
     CONF_ID,
-    CONF_TRIGGER,
-    CONF_TRIGGERS,
     CONF_VARIABLES,
 )
 from homeassistant.core import HomeAssistant

@@ -34,10 +30,14 @@ from homeassistant.helpers.typing import ConfigType
 from homeassistant.util.yaml.input import UndefinedSubstitution

 from .const import (
+    CONF_ACTION,
+    CONF_ACTIONS,
     CONF_HIDE_ENTITY,
     CONF_INITIAL_STATE,
     CONF_TRACE,
+    CONF_TRIGGER,
     CONF_TRIGGER_VARIABLES,
+    CONF_TRIGGERS,
     DOMAIN,
     LOGGER,
 )

@@ -58,9 +58,34 @@ _MINIMAL_PLATFORM_SCHEMA = vol.Schema(
 def _backward_compat_schema(value: Any | None) -> Any:
     """Backward compatibility for automations."""

-    value = cv.renamed(CONF_TRIGGER, CONF_TRIGGERS)(value)
-    value = cv.renamed(CONF_ACTION, CONF_ACTIONS)(value)
-    return cv.renamed(CONF_CONDITION, CONF_CONDITIONS)(value)
+    if not isinstance(value, dict):
+        return value
+
+    # `trigger` has been renamed to `triggers`
+    if CONF_TRIGGER in value:
+        if CONF_TRIGGERS in value:
+            raise vol.Invalid(
+                "Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only."
+            )
+        value[CONF_TRIGGERS] = value.pop(CONF_TRIGGER)
+
+    # `condition` has been renamed to `conditions`
+    if CONF_CONDITION in value:
+        if CONF_CONDITIONS in value:
+            raise vol.Invalid(
+                "Cannot specify both 'condition' and 'conditions'. Please use 'conditions' only."
+            )
+        value[CONF_CONDITIONS] = value.pop(CONF_CONDITION)
+
+    # `action` has been renamed to `actions`
+    if CONF_ACTION in value:
+        if CONF_ACTIONS in value:
+            raise vol.Invalid(
+                "Cannot specify both 'action' and 'actions'. Please use 'actions' only."
+            )
+        value[CONF_ACTIONS] = value.pop(CONF_ACTION)
+
+    return value


 PLATFORM_SCHEMA = vol.All(
@@ -2,6 +2,10 @@

 import logging

+CONF_ACTION = "action"
+CONF_ACTIONS = "actions"
+CONF_TRIGGER = "trigger"
+CONF_TRIGGERS = "triggers"
 CONF_TRIGGER_VARIABLES = "trigger_variables"
 DOMAIN = "automation"
@@ -1,82 +0,0 @@
-"""The AWS S3 integration."""
-
-from __future__ import annotations
-
-import logging
-from typing import cast
-
-from aiobotocore.client import AioBaseClient as S3Client
-from aiobotocore.session import AioSession
-from botocore.exceptions import ClientError, ConnectionError, ParamValidationError
-
-from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
-
-from .const import (
-    CONF_ACCESS_KEY_ID,
-    CONF_BUCKET,
-    CONF_ENDPOINT_URL,
-    CONF_SECRET_ACCESS_KEY,
-    DATA_BACKUP_AGENT_LISTENERS,
-    DOMAIN,
-)
-
-type S3ConfigEntry = ConfigEntry[S3Client]
-
-
-_LOGGER = logging.getLogger(__name__)
-
-
-async def async_setup_entry(hass: HomeAssistant, entry: S3ConfigEntry) -> bool:
-    """Set up S3 from a config entry."""
-
-    data = cast(dict, entry.data)
-    try:
-        session = AioSession()
-        # pylint: disable-next=unnecessary-dunder-call
-        client = await session.create_client(
-            "s3",
-            endpoint_url=data.get(CONF_ENDPOINT_URL),
-            aws_secret_access_key=data[CONF_SECRET_ACCESS_KEY],
-            aws_access_key_id=data[CONF_ACCESS_KEY_ID],
-        ).__aenter__()
-        await client.head_bucket(Bucket=data[CONF_BUCKET])
-    except ClientError as err:
-        raise ConfigEntryError(
-            translation_domain=DOMAIN,
-            translation_key="invalid_credentials",
-        ) from err
-    except ParamValidationError as err:
-        if "Invalid bucket name" in str(err):
-            raise ConfigEntryError(
-                translation_domain=DOMAIN,
-                translation_key="invalid_bucket_name",
-            ) from err
-    except ValueError as err:
-        raise ConfigEntryError(
-            translation_domain=DOMAIN,
-            translation_key="invalid_endpoint_url",
-        ) from err
-    except ConnectionError as err:
-        raise ConfigEntryNotReady(
-            translation_domain=DOMAIN,
-            translation_key="cannot_connect",
-        ) from err
-
-    entry.runtime_data = client
-
-    def notify_backup_listeners() -> None:
-        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
-            listener()
-
-    entry.async_on_unload(entry.async_on_state_change(notify_backup_listeners))
-
-    return True
-
-
-async def async_unload_entry(hass: HomeAssistant, entry: S3ConfigEntry) -> bool:
-    """Unload a config entry."""
-    client = entry.runtime_data
-    await client.__aexit__(None, None, None)
-    return True
@@ -1,330 +0,0 @@
-"""Backup platform for the AWS S3 integration."""
-
-from collections.abc import AsyncIterator, Callable, Coroutine
-import functools
-import json
-import logging
-from time import time
-from typing import Any
-
-from botocore.exceptions import BotoCoreError
-
-from homeassistant.components.backup import (
-    AgentBackup,
-    BackupAgent,
-    BackupAgentError,
-    BackupNotFound,
-    suggested_filename,
-)
-from homeassistant.core import HomeAssistant, callback
-
-from . import S3ConfigEntry
-from .const import CONF_BUCKET, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
-
-_LOGGER = logging.getLogger(__name__)
-CACHE_TTL = 300
-
-# S3 part size requirements: 5 MiB to 5 GiB per part
-# https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
-# We set the threshold to 20 MiB to avoid too many parts.
-# Note that each part is allocated in the memory.
-MULTIPART_MIN_PART_SIZE_BYTES = 20 * 2**20
-
-
-def handle_boto_errors[T](
-    func: Callable[..., Coroutine[Any, Any, T]],
-) -> Callable[..., Coroutine[Any, Any, T]]:
-    """Handle BotoCoreError exceptions by converting them to BackupAgentError."""
-
-    @functools.wraps(func)
-    async def wrapper(*args: Any, **kwargs: Any) -> T:
-        """Catch BotoCoreError and raise BackupAgentError."""
-        try:
-            return await func(*args, **kwargs)
-        except BotoCoreError as err:
-            error_msg = f"Failed during {func.__name__}"
-            raise BackupAgentError(error_msg) from err
-
-    return wrapper
-
-
-async def async_get_backup_agents(
-    hass: HomeAssistant,
-) -> list[BackupAgent]:
-    """Return a list of backup agents."""
-    entries: list[S3ConfigEntry] = hass.config_entries.async_loaded_entries(DOMAIN)
-    return [S3BackupAgent(hass, entry) for entry in entries]
-
-
-@callback
-def async_register_backup_agents_listener(
-    hass: HomeAssistant,
-    *,
-    listener: Callable[[], None],
-    **kwargs: Any,
-) -> Callable[[], None]:
-    """Register a listener to be called when agents are added or removed.
-
-    :return: A function to unregister the listener.
-    """
-    hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)
-
-    @callback
-    def remove_listener() -> None:
-        """Remove the listener."""
-        hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
-        if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
-            del hass.data[DATA_BACKUP_AGENT_LISTENERS]
-
-    return remove_listener
-
-
-def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
-    """Return the suggested filenames for the backup and metadata files."""
-    base_name = suggested_filename(backup).rsplit(".", 1)[0]
-    return f"{base_name}.tar", f"{base_name}.metadata.json"
-
-
-class S3BackupAgent(BackupAgent):
-    """Backup agent for the S3 integration."""
-
-    domain = DOMAIN
-
-    def __init__(self, hass: HomeAssistant, entry: S3ConfigEntry) -> None:
-        """Initialize the S3 agent."""
-        super().__init__()
-        self._client = entry.runtime_data
-        self._bucket: str = entry.data[CONF_BUCKET]
-        self.name = entry.title
-        self.unique_id = entry.entry_id
-        self._backup_cache: dict[str, AgentBackup] = {}
-        self._cache_expiration = time()
-
-    @handle_boto_errors
-    async def async_download_backup(
-        self,
-        backup_id: str,
-        **kwargs: Any,
-    ) -> AsyncIterator[bytes]:
-        """Download a backup file.
-
-        :param backup_id: The ID of the backup that was returned in async_list_backups.
-        :return: An async iterator that yields bytes.
-        """
-        backup = await self._find_backup_by_id(backup_id)
-        tar_filename, _ = suggested_filenames(backup)
-
-        response = await self._client.get_object(Bucket=self._bucket, Key=tar_filename)
-        return response["Body"].iter_chunks()
-
-    async def async_upload_backup(
-        self,
-        *,
-        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
-        backup: AgentBackup,
-        **kwargs: Any,
-    ) -> None:
-        """Upload a backup.
-
-        :param open_stream: A function returning an async iterator that yields bytes.
-        :param backup: Metadata about the backup that should be uploaded.
-        """
-        tar_filename, metadata_filename = suggested_filenames(backup)
-
-        try:
-            if backup.size < MULTIPART_MIN_PART_SIZE_BYTES:
-                await self._upload_simple(tar_filename, open_stream)
-            else:
-                await self._upload_multipart(tar_filename, open_stream)
-
-            # Upload the metadata file
-            metadata_content = json.dumps(backup.as_dict())
-            await self._client.put_object(
-                Bucket=self._bucket,
-                Key=metadata_filename,
-                Body=metadata_content,
-            )
-        except BotoCoreError as err:
-            raise BackupAgentError("Failed to upload backup") from err
-        else:
-            # Reset cache after successful upload
-            self._cache_expiration = time()
-
-    async def _upload_simple(
-        self,
-        tar_filename: str,
-        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
-    ) -> None:
-        """Upload a small file using simple upload.
-
-        :param tar_filename: The target filename for the backup.
-        :param open_stream: A function returning an async iterator that yields bytes.
-        """
-        _LOGGER.debug("Starting simple upload for %s", tar_filename)
-        stream = await open_stream()
-        file_data = bytearray()
-        async for chunk in stream:
-            file_data.extend(chunk)
-
-        await self._client.put_object(
-            Bucket=self._bucket,
-            Key=tar_filename,
-            Body=bytes(file_data),
-        )
-
-    async def _upload_multipart(
-        self,
-        tar_filename: str,
-        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
-    ):
-        """Upload a large file using multipart upload.
-
-        :param tar_filename: The target filename for the backup.
-        :param open_stream: A function returning an async iterator that yields bytes.
-        """
-        _LOGGER.debug("Starting multipart upload for %s", tar_filename)
-        multipart_upload = await self._client.create_multipart_upload(
-            Bucket=self._bucket,
-            Key=tar_filename,
-        )
-        upload_id = multipart_upload["UploadId"]
-        try:
-            parts = []
-            part_number = 1
-            buffer_size = 0  # bytes
-            buffer: list[bytes] = []
-
-            stream = await open_stream()
-            async for chunk in stream:
-                buffer_size += len(chunk)
-                buffer.append(chunk)
-
-                # If buffer size meets minimum part size, upload it as a part
-                if buffer_size >= MULTIPART_MIN_PART_SIZE_BYTES:
-                    _LOGGER.debug(
-                        "Uploading part number %d, size %d", part_number, buffer_size
-                    )
-                    part = await self._client.upload_part(
-                        Bucket=self._bucket,
-                        Key=tar_filename,
-                        PartNumber=part_number,
-                        UploadId=upload_id,
-                        Body=b"".join(buffer),
-                    )
-                    parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
-                    part_number += 1
-                    buffer_size = 0
-                    buffer = []
-
-            # Upload the final buffer as the last part (no minimum size requirement)
-            if buffer:
-                _LOGGER.debug(
-                    "Uploading final part number %d, size %d", part_number, buffer_size
-                )
-                part = await self._client.upload_part(
-                    Bucket=self._bucket,
-                    Key=tar_filename,
-                    PartNumber=part_number,
-                    UploadId=upload_id,
-                    Body=b"".join(buffer),
-                )
-                parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
-
-            await self._client.complete_multipart_upload(
-                Bucket=self._bucket,
-                Key=tar_filename,
-                UploadId=upload_id,
-                MultipartUpload={"Parts": parts},
-            )
-
-        except BotoCoreError:
-            try:
-                await self._client.abort_multipart_upload(
|
||||
Bucket=self._bucket,
|
||||
Key=tar_filename,
|
||||
UploadId=upload_id,
|
||||
)
|
||||
except BotoCoreError:
|
||||
_LOGGER.exception("Failed to abort multipart upload")
|
||||
raise
|
||||
|
||||
@handle_boto_errors
|
||||
async def async_delete_backup(
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Delete a backup file.
|
||||
|
||||
:param backup_id: The ID of the backup that was returned in async_list_backups.
|
||||
"""
|
||||
backup = await self._find_backup_by_id(backup_id)
|
||||
tar_filename, metadata_filename = suggested_filenames(backup)
|
||||
|
||||
# Delete both the backup file and its metadata file
|
||||
await self._client.delete_object(Bucket=self._bucket, Key=tar_filename)
|
||||
await self._client.delete_object(Bucket=self._bucket, Key=metadata_filename)
|
||||
|
||||
# Reset cache after successful deletion
|
||||
self._cache_expiration = time()
|
||||
|
||||
@handle_boto_errors
|
||||
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
|
||||
"""List backups."""
|
||||
backups = await self._list_backups()
|
||||
return list(backups.values())
|
||||
|
||||
@handle_boto_errors
|
||||
async def async_get_backup(
|
||||
self,
|
||||
backup_id: str,
|
||||
**kwargs: Any,
|
||||
) -> AgentBackup:
|
||||
"""Return a backup."""
|
||||
return await self._find_backup_by_id(backup_id)
|
||||
|
||||
async def _find_backup_by_id(self, backup_id: str) -> AgentBackup:
|
||||
"""Find a backup by its backup ID."""
|
||||
backups = await self._list_backups()
|
||||
if backup := backups.get(backup_id):
|
||||
return backup
|
||||
|
||||
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||
|
||||
async def _list_backups(self) -> dict[str, AgentBackup]:
|
||||
"""List backups, using a cache if possible."""
|
||||
if time() <= self._cache_expiration:
|
||||
return self._backup_cache
|
||||
|
||||
backups = {}
|
||||
response = await self._client.list_objects_v2(Bucket=self._bucket)
|
||||
|
||||
# Filter for metadata files only
|
||||
metadata_files = [
|
||||
obj
|
||||
for obj in response.get("Contents", [])
|
||||
if obj["Key"].endswith(".metadata.json")
|
||||
]
|
||||
|
||||
for metadata_file in metadata_files:
|
||||
try:
|
||||
# Download and parse metadata file
|
||||
metadata_response = await self._client.get_object(
|
||||
Bucket=self._bucket, Key=metadata_file["Key"]
|
||||
)
|
||||
metadata_content = await metadata_response["Body"].read()
|
||||
metadata_json = json.loads(metadata_content)
|
||||
except (BotoCoreError, json.JSONDecodeError) as err:
|
||||
_LOGGER.warning(
|
||||
"Failed to process metadata file %s: %s",
|
||||
metadata_file["Key"],
|
||||
err,
|
||||
)
|
||||
continue
|
||||
backup = AgentBackup.from_dict(metadata_json)
|
||||
backups[backup.backup_id] = backup
|
||||
|
||||
self._backup_cache = backups
|
||||
self._cache_expiration = time() + CACHE_TTL
|
||||
|
||||
return self._backup_cache
|
||||
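The upload path above picks simple vs. multipart purely on backup.size, and the multipart branch re-chunks whatever the stream yields into parts of at least the minimum part size, flushing the remainder as an undersized final part (which S3 permits). A standalone sketch of that buffering, assuming a 5 MiB minimum (S3's documented floor; the integration's MULTIPART_MIN_PART_SIZE_BYTES constant may be larger):

import asyncio
from collections.abc import AsyncIterator

MIN_PART_SIZE = 5 * 1024 * 1024  # assumed minimum part size for this sketch


async def iter_parts(
    stream: AsyncIterator[bytes], min_part_size: int = MIN_PART_SIZE
) -> AsyncIterator[bytes]:
    """Re-chunk an arbitrary byte stream into parts of at least min_part_size."""
    buffer: list[bytes] = []
    buffered = 0
    async for chunk in stream:
        buffer.append(chunk)
        buffered += len(chunk)
        if buffered >= min_part_size:
            yield b"".join(buffer)  # a full part, ready for upload_part
            buffer, buffered = [], 0
    if buffer:
        yield b"".join(buffer)  # final part, allowed to be undersized


async def _demo() -> None:
    async def fake_stream() -> AsyncIterator[bytes]:
        for _ in range(3):
            yield b"x" * (3 * 1024 * 1024)  # three 3 MiB chunks

    async for part in iter_parts(fake_stream()):
        print(len(part))  # 6 MiB part, then the 3 MiB remainder


asyncio.run(_demo())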
@@ -1,101 +0,0 @@
"""Config flow for the AWS S3 integration."""

from __future__ import annotations

from typing import Any
from urllib.parse import urlparse

from aiobotocore.session import AioSession
from botocore.exceptions import ClientError, ConnectionError, ParamValidationError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
    TextSelector,
    TextSelectorConfig,
    TextSelectorType,
)

from .const import (
    AWS_DOMAIN,
    CONF_ACCESS_KEY_ID,
    CONF_BUCKET,
    CONF_ENDPOINT_URL,
    CONF_SECRET_ACCESS_KEY,
    DEFAULT_ENDPOINT_URL,
    DESCRIPTION_AWS_S3_DOCS_URL,
    DESCRIPTION_BOTO3_DOCS_URL,
    DOMAIN,
)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_ACCESS_KEY_ID): cv.string,
        vol.Required(CONF_SECRET_ACCESS_KEY): TextSelector(
            config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
        ),
        vol.Required(CONF_BUCKET): cv.string,
        vol.Required(CONF_ENDPOINT_URL, default=DEFAULT_ENDPOINT_URL): TextSelector(
            config=TextSelectorConfig(type=TextSelectorType.URL)
        ),
    }
)


class S3ConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initiated by the user."""
        errors: dict[str, str] = {}

        if user_input is not None:
            self._async_abort_entries_match(
                {
                    CONF_BUCKET: user_input[CONF_BUCKET],
                    CONF_ENDPOINT_URL: user_input[CONF_ENDPOINT_URL],
                }
            )

            if not urlparse(user_input[CONF_ENDPOINT_URL]).hostname.endswith(
                AWS_DOMAIN
            ):
                errors[CONF_ENDPOINT_URL] = "invalid_endpoint_url"
            else:
                try:
                    session = AioSession()
                    async with session.create_client(
                        "s3",
                        endpoint_url=user_input.get(CONF_ENDPOINT_URL),
                        aws_secret_access_key=user_input[CONF_SECRET_ACCESS_KEY],
                        aws_access_key_id=user_input[CONF_ACCESS_KEY_ID],
                    ) as client:
                        await client.head_bucket(Bucket=user_input[CONF_BUCKET])
                except ClientError:
                    errors["base"] = "invalid_credentials"
                except ParamValidationError as err:
                    if "Invalid bucket name" in str(err):
                        errors[CONF_BUCKET] = "invalid_bucket_name"
                except ValueError:
                    errors[CONF_ENDPOINT_URL] = "invalid_endpoint_url"
                except ConnectionError:
                    errors[CONF_ENDPOINT_URL] = "cannot_connect"
                else:
                    return self.async_create_entry(
                        title=user_input[CONF_BUCKET], data=user_input
                    )

        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input
            ),
            errors=errors,
            description_placeholders={
                "aws_s3_docs_url": DESCRIPTION_AWS_S3_DOCS_URL,
                "boto3_docs_url": DESCRIPTION_BOTO3_DOCS_URL,
            },
        )
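The flow validates the endpoint shape before touching the network: the hostname must end in the AWS domain, otherwise head_bucket is never attempted. A minimal sketch of that check (the None guard is an addition here; urlparse returns hostname=None for scheme-less input, which the flow's URL selector normally prevents):

from urllib.parse import urlparse

AWS_DOMAIN = "amazonaws.com"


def is_aws_endpoint(url: str) -> bool:
    """Return True if the URL's hostname ends with the AWS domain."""
    hostname = urlparse(url).hostname
    return hostname is not None and hostname.endswith(AWS_DOMAIN)


assert is_aws_endpoint("https://s3.eu-central-1.amazonaws.com/")
assert not is_aws_endpoint("https://minio.example.net")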
@@ -1,23 +0,0 @@
"""Constants for the AWS S3 integration."""

from collections.abc import Callable
from typing import Final

from homeassistant.util.hass_dict import HassKey

DOMAIN: Final = "aws_s3"

CONF_ACCESS_KEY_ID = "access_key_id"
CONF_SECRET_ACCESS_KEY = "secret_access_key"
CONF_ENDPOINT_URL = "endpoint_url"
CONF_BUCKET = "bucket"

AWS_DOMAIN = "amazonaws.com"
DEFAULT_ENDPOINT_URL = f"https://s3.eu-central-1.{AWS_DOMAIN}/"

DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)

DESCRIPTION_AWS_S3_DOCS_URL = "https://docs.aws.amazon.com/general/latest/gr/s3.html"
DESCRIPTION_BOTO3_DOCS_URL = "https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html"
@@ -1,12 +0,0 @@
{
  "domain": "aws_s3",
  "name": "AWS S3",
  "codeowners": ["@tomasbedrich"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aws_s3",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["aiobotocore"],
  "quality_scale": "bronze",
  "requirements": ["aiobotocore==2.21.1"]
}
@@ -1,112 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.
  appropriate-polling:
    status: exempt
    comment: This integration does not poll.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: This integration does not have any custom actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: Entities of this integration do not explicitly subscribe to events.
  entity-unique-id:
    status: exempt
    comment: This integration does not have entities.
  has-entity-name:
    status: exempt
    comment: This integration does not have entities.
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: Integration does not register custom actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: This integration does not have an options flow.
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: This integration does not have entities.
  integration-owner: done
  log-when-unavailable: todo
  parallel-updates:
    status: exempt
    comment: This integration does not poll.
  reauthentication-flow: todo
  test-coverage: done

  # Gold
  devices:
    status: exempt
    comment: This integration does not have entities.
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: S3 is a cloud service that is not discovered on the network.
  discovery:
    status: exempt
    comment: S3 is a cloud service that is not discovered on the network.
  docs-data-update:
    status: exempt
    comment: This integration does not poll.
  docs-examples:
    status: exempt
    comment: The integration extends core functionality and does not require examples.
  docs-known-limitations:
    status: exempt
    comment: No known limitations.
  docs-supported-devices:
    status: exempt
    comment: This integration does not support physical devices.
  docs-supported-functions: done
  docs-troubleshooting:
    status: exempt
    comment: There are no more detailed troubleshooting instructions available than what is already included in strings.json.
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: This integration does not have devices.
  entity-category:
    status: exempt
    comment: This integration does not have entities.
  entity-device-class:
    status: exempt
    comment: This integration does not have entities.
  entity-disabled-by-default:
    status: exempt
    comment: This integration does not have entities.
  entity-translations:
    status: exempt
    comment: This integration does not have entities.
  exception-translations: done
  icon-translations:
    status: exempt
    comment: This integration does not use icons.
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: There are no issues which can be repaired.
  stale-devices:
    status: exempt
    comment: This integration does not have devices.

  # Platinum
  async-dependency: done
  inject-websession: todo
  strict-typing: todo
@@ -1,41 +0,0 @@
{
  "config": {
    "step": {
      "user": {
        "data": {
          "access_key_id": "Access key ID",
          "secret_access_key": "Secret access key",
          "bucket": "Bucket name",
          "endpoint_url": "Endpoint URL"
        },
        "data_description": {
          "access_key_id": "Access key ID to connect to AWS S3 API",
          "secret_access_key": "Secret access key to connect to AWS S3 API",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "endpoint_url": "Endpoint URL provided to [Boto3 Session]({boto3_docs_url}). Region-specific [AWS S3 endpoints]({aws_s3_docs_url}) are available in their docs."
        },
        "title": "Add AWS S3 bucket"
      }
    },
    "error": {
      "cannot_connect": "[%key:component::aws_s3::exceptions::cannot_connect::message%]",
      "invalid_bucket_name": "[%key:component::aws_s3::exceptions::invalid_bucket_name::message%]",
      "invalid_credentials": "[%key:component::aws_s3::exceptions::invalid_credentials::message%]",
      "invalid_endpoint_url": "Invalid endpoint URL. Please make sure it's a valid AWS S3 endpoint URL."
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "Cannot connect to endpoint"
    },
    "invalid_bucket_name": {
      "message": "Invalid bucket name"
    },
    "invalid_credentials": {
      "message": "Bucket cannot be accessed using provided combination of access key ID and secret access key."
    }
  }
}
@@ -2,9 +2,9 @@

from homeassistant.config_entries import SOURCE_SYSTEM
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, discovery_flow
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType

@@ -36,7 +36,6 @@ from .manager import (
    IdleEvent,
    IncorrectPasswordError,
    ManagerBackup,
    ManagerStateEvent,
    NewBackup,
    RestoreBackupEvent,
    RestoreBackupStage,
@@ -69,12 +68,12 @@ __all__ = [
    "IncorrectPasswordError",
    "LocalBackupAgent",
    "ManagerBackup",
    "ManagerStateEvent",
    "NewBackup",
    "RestoreBackupEvent",
    "RestoreBackupStage",
    "RestoreBackupState",
    "WrittenBackup",
    "async_get_manager",
    "suggested_filename",
    "suggested_filename_from_name_date",
]
@@ -99,13 +98,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

    backup_manager = BackupManager(hass, reader_writer)
    hass.data[DATA_MANAGER] = backup_manager
    try:
        await backup_manager.async_setup()
    except Exception as err:
        hass.data[DATA_BACKUP].manager_ready.set_exception(err)
        raise
    else:
        hass.data[DATA_BACKUP].manager_ready.set_result(None)
    await backup_manager.async_setup()

    async_register_websocket_handlers(hass, with_hassio)

@@ -160,3 +153,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bo
async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


@callback
def async_get_manager(hass: HomeAssistant) -> BackupManager:
    """Get the backup manager instance.

    Raises HomeAssistantError if the backup integration is not available.
    """
    if DATA_MANAGER not in hass.data:
        raise HomeAssistantError("Backup integration is not available")

    return hass.data[DATA_MANAGER]

@@ -1,38 +0,0 @@
"""Websocket commands for the Backup integration."""

from typing import Any

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import async_subscribe_events

from .const import DATA_MANAGER
from .manager import ManagerStateEvent


@callback
def async_register_websocket_handlers(hass: HomeAssistant) -> None:
    """Register websocket commands."""
    websocket_api.async_register_command(hass, handle_subscribe_events)


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Subscribe to backup events."""

    def on_event(event: ManagerStateEvent) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], event))

    if DATA_MANAGER in hass.data:
        manager = hass.data[DATA_MANAGER]
        on_event(manager.last_event)
    connection.subscriptions[msg["id"]] = async_subscribe_events(hass, on_event)
    connection.send_result(msg["id"])
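On the wire, subscribing is a single admin-only command: the handler pushes the current state immediately via on_event and then acknowledges the command. A hypothetical client exchange, with payload shapes illustrative only (the frame types follow websocket_api conventions, but the event body depends on the manager's state):

# Client -> server: subscribe (on an already-authenticated admin connection)
subscribe = {"id": 1, "type": "backup/subscribe_events"}

# Server -> client: the latest manager state, delivered right away
event_frame = {"id": 1, "type": "event", "event": {"manager_state": "idle"}}

# Server -> client: command acknowledgement
result_frame = {"id": 1, "type": "result", "success": True, "result": None}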
@@ -2,7 +2,6 @@

from __future__ import annotations

from collections import defaultdict
from dataclasses import dataclass, field, replace
import datetime as dt
from datetime import datetime, timedelta
@@ -88,26 +87,12 @@ class BackupConfigData:
        else:
            time = None
        days = [Day(day) for day in data["schedule"]["days"]]
        agents = {}
        for agent_id, agent_data in data["agents"].items():
            protected = agent_data["protected"]
            stored_retention = agent_data["retention"]
            agent_retention: AgentRetentionConfig | None
            if stored_retention:
                agent_retention = AgentRetentionConfig(
                    copies=stored_retention["copies"],
                    days=stored_retention["days"],
                )
            else:
                agent_retention = None
            agent_config = AgentConfig(
                protected=protected,
                retention=agent_retention,
            )
            agents[agent_id] = agent_config

        return cls(
            agents=agents,
            agents={
                agent_id: AgentConfig(protected=agent_data["protected"])
                for agent_id, agent_data in data["agents"].items()
            },
            automatic_backups_configured=data["automatic_backups_configured"],
            create_backup=CreateBackupConfig(
                agent_ids=data["create_backup"]["agent_ids"],
@@ -191,36 +176,12 @@ class BackupConfig:
        """Update config."""
        if agents is not UNDEFINED:
            for agent_id, agent_config in agents.items():
                agent_retention = agent_config.get("retention")
                if agent_retention is None:
                    new_agent_retention = None
                else:
                    new_agent_retention = AgentRetentionConfig(
                        copies=agent_retention.get("copies"),
                        days=agent_retention.get("days"),
                    )
                if agent_id not in self.data.agents:
                    old_agent_retention = None
                    self.data.agents[agent_id] = AgentConfig(
                        protected=agent_config.get("protected", True),
                        retention=new_agent_retention,
                    )
                    self.data.agents[agent_id] = AgentConfig(**agent_config)
                else:
                    new_agent_config = self.data.agents[agent_id]
                    old_agent_retention = new_agent_config.retention
                    if "protected" in agent_config:
                        new_agent_config = replace(
                            new_agent_config, protected=agent_config["protected"]
                        )
                    if "retention" in agent_config:
                        new_agent_config = replace(
                            new_agent_config, retention=new_agent_retention
                        )
                    self.data.agents[agent_id] = new_agent_config
                    if new_agent_retention != old_agent_retention:
                        # There's a single retention application method
                        # for both global and agent retention settings.
                        self.data.retention.apply(self._manager)
                    self.data.agents[agent_id] = replace(
                        self.data.agents[agent_id], **agent_config
                    )
        if automatic_backups_configured is not UNDEFINED:
            self.data.automatic_backups_configured = automatic_backups_configured
        if create_backup is not UNDEFINED:
@@ -246,24 +207,11 @@ class AgentConfig:
    """Represent the config for an agent."""

    protected: bool
    """Agent protected configuration.

    If True, the agent backups are password protected.
    """
    retention: AgentRetentionConfig | None = None
    """Agent retention configuration.

    If None, the global retention configuration is used.
    If not None, the global retention configuration is ignored for this agent.
    If an agent retention configuration is set and both copies and days are None,
    backups will be kept forever for that agent.
    """

    def to_dict(self) -> StoredAgentConfig:
        """Convert agent config to a dict."""
        return {
            "protected": self.protected,
            "retention": self.retention.to_dict() if self.retention else None,
        }


@@ -271,46 +219,24 @@ class StoredAgentConfig(TypedDict):
    """Represent the stored config for an agent."""

    protected: bool
    retention: StoredRetentionConfig | None


class AgentParametersDict(TypedDict, total=False):
    """Represent the parameters for an agent."""

    protected: bool
    retention: RetentionParametersDict | None


@dataclass(kw_only=True)
class BaseRetentionConfig:
    """Represent the base backup retention configuration."""
class RetentionConfig:
    """Represent the backup retention configuration."""

    copies: int | None = None
    days: int | None = None

    def to_dict(self) -> StoredRetentionConfig:
        """Convert backup retention configuration to a dict."""
        return StoredRetentionConfig(
            copies=self.copies,
            days=self.days,
        )


@dataclass(kw_only=True)
class RetentionConfig(BaseRetentionConfig):
    """Represent the backup retention configuration."""

    def apply(self, manager: BackupManager) -> None:
        """Apply backup retention configuration."""
        agents_retention = {
            agent_id: agent_config.retention
            for agent_id, agent_config in manager.config.data.agents.items()
        }

        if self.days is not None or any(
            agent_retention and agent_retention.days is not None
            for agent_retention in agents_retention.values()
        ):
            if self.days is not None:
                LOGGER.debug(
                    "Scheduling next automatic delete of backups older than %s in 1 day",
                    self.days,
@@ -320,6 +246,13 @@ class RetentionConfig(BaseRetentionConfig):
            LOGGER.debug("Unscheduling next automatic delete")
            self._unschedule_next(manager)

    def to_dict(self) -> StoredRetentionConfig:
        """Convert backup retention configuration to a dict."""
        return StoredRetentionConfig(
            copies=self.copies,
            days=self.days,
        )

    @callback
    def _schedule_next(
        self,
@@ -338,81 +271,16 @@ class RetentionConfig(BaseRetentionConfig):
            """Return backups older than days to delete."""
            # we need to check here since we await before
            # this filter is applied
            agents_retention = {
                agent_id: agent_config.retention
                for agent_id, agent_config in manager.config.data.agents.items()
            }
            has_agents_retention = any(
                agent_retention for agent_retention in agents_retention.values()
            )
            has_agents_retention_days = any(
                agent_retention and agent_retention.days is not None
                for agent_retention in agents_retention.values()
            )
            if (global_days := self.days) is None and not has_agents_retention_days:
                # No global retention days and no agent retention days
            if self.days is None:
                return {}

            now = dt_util.utcnow()
            if global_days is not None and not has_agents_retention:
                # Return early to avoid the longer filtering below.
                return {
                    backup_id: backup
                    for backup_id, backup in backups.items()
                    if dt_util.parse_datetime(backup.date, raise_on_error=True)
                    + timedelta(days=global_days)
                    < now
                }

            # If there are any agent retention settings, we need to check
            # the retention settings, for every backup and agent combination.

            backups_to_delete = {}

            for backup_id, backup in backups.items():
                backup_date = dt_util.parse_datetime(
                    backup.date, raise_on_error=True
                )
                delete_from_agents = set(backup.agents)
                for agent_id in backup.agents:
                    agent_retention = agents_retention.get(agent_id)
                    if agent_retention is None:
                        # This agent does not have a retention setting,
                        # so the global retention setting should be used.
                        if global_days is None:
                            # This agent does not have a retention setting
                            # and the global retention days setting is None,
                            # so this backup should not be deleted.
                            delete_from_agents.discard(agent_id)
                            continue
                        days = global_days
                    elif (agent_days := agent_retention.days) is None:
                        # This agent has a retention setting
                        # where days is set to None,
                        # so the backup should not be deleted.
                        delete_from_agents.discard(agent_id)
                        continue
                    else:
                        # This agent has a retention setting
                        # where days is set to a number,
                        # so that setting should be used.
                        days = agent_days
                    if backup_date + timedelta(days=days) >= now:
                        # This backup is not older than the retention days,
                        # so this agent should not be deleted.
                        delete_from_agents.discard(agent_id)

                filtered_backup = replace(
                    backup,
                    agents={
                        agent_id: agent_backup_status
                        for agent_id, agent_backup_status in backup.agents.items()
                        if agent_id in delete_from_agents
                    },
                )
                backups_to_delete[backup_id] = filtered_backup

            return backups_to_delete
            return {
                backup_id: backup
                for backup_id, backup in backups.items()
                if dt_util.parse_datetime(backup.date, raise_on_error=True)
                + timedelta(days=self.days)
                < now
            }

        await manager.async_delete_filtered_backups(
            include_filter=_automatic_backups_filter, delete_filter=_delete_filter
@@ -444,10 +312,6 @@ class RetentionParametersDict(TypedDict, total=False):
    days: int | None


class AgentRetentionConfig(BaseRetentionConfig):
    """Represent an agent retention configuration."""


class StoredBackupSchedule(TypedDict):
    """Represent the stored backup schedule configuration."""

@@ -690,87 +554,16 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N
        backups: dict[str, ManagerBackup],
    ) -> dict[str, ManagerBackup]:
        """Return oldest backups more numerous than copies to delete."""
        agents_retention = {
            agent_id: agent_config.retention
            for agent_id, agent_config in manager.config.data.agents.items()
        }
        has_agents_retention = any(
            agent_retention for agent_retention in agents_retention.values()
        )
        has_agents_retention_copies = any(
            agent_retention and agent_retention.copies is not None
            for agent_retention in agents_retention.values()
        )
        # we need to check here since we await before
        # this filter is applied
        if (
            global_copies := manager.config.data.retention.copies
        ) is None and not has_agents_retention_copies:
            # No global retention copies and no agent retention copies
        if manager.config.data.retention.copies is None:
            return {}
        if global_copies is not None and not has_agents_retention:
            # Return early to avoid the longer filtering below.
            return dict(
                sorted(
                    backups.items(),
                    key=lambda backup_item: backup_item[1].date,
                )[: max(len(backups) - global_copies, 0)]
            )

        backups_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(dict)
        for backup_id, backup in backups.items():
            for agent_id in backup.agents:
                backups_by_agent[agent_id][backup_id] = backup

        backups_to_delete_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(
            dict
        return dict(
            sorted(
                backups.items(),
                key=lambda backup_item: backup_item[1].date,
            )[: max(len(backups) - manager.config.data.retention.copies, 0)]
        )
        for agent_id, agent_backups in backups_by_agent.items():
            agent_retention = agents_retention.get(agent_id)
            if agent_retention is None:
                # This agent does not have a retention setting,
                # so the global retention setting should be used.
                if global_copies is None:
                    # This agent does not have a retention setting
                    # and the global retention copies setting is None,
                    # so backups should not be deleted.
                    continue
                # The global retention setting will be used.
                copies = global_copies
            elif (agent_copies := agent_retention.copies) is None:
                # This agent has a retention setting
                # where copies is set to None,
                # so backups should not be deleted.
                continue
            else:
                # This agent retention setting will be used.
                copies = agent_copies

            backups_to_delete_by_agent[agent_id] = dict(
                sorted(
                    agent_backups.items(),
                    key=lambda backup_item: backup_item[1].date,
                )[: max(len(agent_backups) - copies, 0)]
            )

        backup_ids_to_delete: dict[str, set[str]] = defaultdict(set)
        for agent_id, to_delete in backups_to_delete_by_agent.items():
            for backup_id in to_delete:
                backup_ids_to_delete[backup_id].add(agent_id)
        backups_to_delete: dict[str, ManagerBackup] = {}
        for backup_id, agent_ids in backup_ids_to_delete.items():
            backup = backups[backup_id]
            # filter the backup to only include the agents that should be deleted
            filtered_backup = replace(
                backup,
                agents={
                    agent_id: agent_backup_status
                    for agent_id, agent_backup_status in backup.agents.items()
                    if agent_id in agent_ids
                },
            )
            backups_to_delete[backup_id] = filtered_backup
        return backups_to_delete

        await manager.async_delete_filtered_backups(
            include_filter=_automatic_backups_filter, delete_filter=_delete_filter

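Both the global and the per-agent branches of the copies-based filter reduce to the same slice: sort by date ascending and drop everything before len(backups) - copies. A worked sketch of that arithmetic with plain strings standing in for ManagerBackup objects:

# Oldest-first slice used by the copies-based retention above.
backups = {
    "a": "2025-01-01T00:00:00+00:00",
    "b": "2025-02-01T00:00:00+00:00",
    "c": "2025-03-01T00:00:00+00:00",
}
copies = 2
to_delete = dict(
    sorted(backups.items(), key=lambda item: item[1])[: max(len(backups) - copies, 0)]
)
print(to_delete)  # {'a': '2025-01-01T00:00:00+00:00'}: only the oldest exceeds the count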
@@ -8,10 +8,6 @@ from datetime import datetime

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import (
    async_subscribe_events,
    async_subscribe_platform_events,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN, LOGGER
@@ -54,8 +50,8 @@ class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
            update_interval=None,
        )
        self.unsubscribe: list[Callable[[], None]] = [
            async_subscribe_events(hass, self._on_event),
            async_subscribe_platform_events(hass, self._on_event),
            backup_manager.async_subscribe_events(self._on_event),
            backup_manager.async_subscribe_platform_events(self._on_event),
        ]

        self.backup_manager = backup_manager

@@ -22,7 +22,7 @@ from . import util
from .agent import BackupAgent
from .const import DATA_MANAGER
from .manager import BackupManager
from .models import AgentBackup, BackupNotFound
from .models import BackupNotFound


@callback
@@ -85,15 +85,7 @@ class DownloadBackupView(HomeAssistantView):
                request, headers, backup_id, agent_id, agent, manager
            )
            return await self._send_backup_with_password(
                hass,
                backup,
                request,
                headers,
                backup_id,
                agent_id,
                password,
                agent,
                manager,
                hass, request, headers, backup_id, agent_id, password, agent, manager
            )
        except BackupNotFound:
            return Response(status=HTTPStatus.NOT_FOUND)
@@ -124,7 +116,6 @@ class DownloadBackupView(HomeAssistantView):
    async def _send_backup_with_password(
        self,
        hass: HomeAssistant,
        backup: AgentBackup,
        request: Request,
        headers: dict[istr, str],
        backup_id: str,
@@ -153,8 +144,7 @@ class DownloadBackupView(HomeAssistantView):

        stream = util.AsyncIteratorWriter(hass)
        worker = threading.Thread(
            target=util.decrypt_backup,
            args=[backup, reader, stream, password, on_done, 0, []],
            target=util.decrypt_backup, args=[reader, stream, password, on_done, 0, []]
        )
        try:
            worker.start()

@@ -36,7 +36,6 @@ from homeassistant.helpers import (
    issue_registry as ir,
    start,
)
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util

@@ -359,12 +358,10 @@ class BackupManager:
        # Latest backup event and backup event subscribers
        self.last_event: ManagerStateEvent = BlockedEvent()
        self.last_action_event: ManagerStateEvent | None = None
        self._backup_event_subscriptions = hass.data[
            DATA_BACKUP
        ].backup_event_subscriptions
        self._backup_platform_event_subscriptions = hass.data[
            DATA_BACKUP
        ].backup_platform_event_subscriptions
        self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = []
        self._backup_platform_event_subscriptions: list[
            Callable[[BackupPlatformEvent], None]
        ] = []

    async def async_setup(self) -> None:
        """Set up the backup manager."""
@@ -1354,6 +1351,32 @@ class BackupManager:
        for subscription in self._backup_event_subscriptions:
            subscription(event)

    @callback
    def async_subscribe_events(
        self,
        on_event: Callable[[ManagerStateEvent], None],
    ) -> Callable[[], None]:
        """Subscribe events."""

        def remove_subscription() -> None:
            self._backup_event_subscriptions.remove(on_event)

        self._backup_event_subscriptions.append(on_event)
        return remove_subscription

    @callback
    def async_subscribe_platform_events(
        self,
        on_event: Callable[[BackupPlatformEvent], None],
    ) -> Callable[[], None]:
        """Subscribe to backup platform events."""

        def remove_subscription() -> None:
            self._backup_platform_event_subscriptions.remove(on_event)

        self._backup_platform_event_subscriptions.append(on_event)
        return remove_subscription

    def _update_issue_backup_failed(self) -> None:
        """Update issue registry when a backup fails."""
        ir.async_create_issue(

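async_subscribe_events and async_subscribe_platform_events both follow the same closure pattern: append the callback to a list and hand back a zero-argument remover. A stripped-down sketch of that pattern outside Home Assistant:

from collections.abc import Callable


class EventBus:
    """Minimal subscribe/unsubscribe bus mirroring the pattern above."""

    def __init__(self) -> None:
        self._subscribers: list[Callable[[str], None]] = []

    def subscribe(self, on_event: Callable[[str], None]) -> Callable[[], None]:
        self._subscribers.append(on_event)

        def remove_subscription() -> None:
            self._subscribers.remove(on_event)

        return remove_subscription

    def publish(self, event: str) -> None:
        # Iterate over a copy so a subscriber may unsubscribe itself mid-dispatch
        for subscriber in list(self._subscribers):
            subscriber(event)


bus = EventBus()
unsubscribe = bus.subscribe(print)
bus.publish("backup_started")   # printed
unsubscribe()
bus.publish("backup_finished")  # no longer printed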
@@ -19,9 +19,14 @@ from homeassistant.components.onboarding import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager

from . import BackupManager, Folder, IncorrectPasswordError, http as backup_http
from . import (
    BackupManager,
    Folder,
    IncorrectPasswordError,
    async_get_manager,
    http as backup_http,
)

if TYPE_CHECKING:
    from homeassistant.components.onboarding import OnboardingStoreData
@@ -54,7 +59,7 @@ def with_backup_manager[_ViewT: BaseOnboardingView, **_P](
        if self._data["done"]:
            raise HTTPUnauthorized

        manager = await async_get_backup_manager(request.app[KEY_HASS])
        manager = async_get_manager(request.app[KEY_HASS])
        return await func(self, manager, request, *args, **kwargs)

    return with_backup

@@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 6
STORAGE_VERSION_MINOR = 5


class StoredBackupData(TypedDict):
@@ -72,10 +72,6 @@ class _BackupStore(Store[StoredBackupData]):
            data["config"]["automatic_backups_configured"] = (
                data["config"]["create_backup"]["password"] is not None
            )
        if old_minor_version < 6:
            # Version 1.6 adds agent retention settings
            for agent in data["config"]["agents"]:
                data["config"]["agents"][agent]["retention"] = None

        # Note: We allow reading data with major version 2.
        # Reject if major version is higher than 2.

@@ -295,26 +295,13 @@ def validate_password_stream(
    raise BackupEmpty


def _get_expected_archives(backup: AgentBackup) -> set[str]:
    """Get the expected archives in the backup."""
    expected_archives = set()
    if backup.homeassistant_included:
        expected_archives.add("homeassistant")
    for addon in backup.addons:
        expected_archives.add(addon.slug)
    for folder in backup.folders:
        expected_archives.add(folder.value)
    return expected_archives


def decrypt_backup(
    backup: AgentBackup,
    input_stream: IO[bytes],
    output_stream: IO[bytes],
    password: str | None,
    on_done: Callable[[Exception | None], None],
    minimum_size: int,
    nonces: NonceGenerator,
    nonces: list[bytes],
) -> None:
    """Decrypt a backup."""
    error: Exception | None = None
@@ -328,7 +315,7 @@ def decrypt_backup(
            fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
        ) as output_tar,
    ):
        _decrypt_backup(backup, input_tar, output_tar, password)
        _decrypt_backup(input_tar, output_tar, password)
    except (DecryptError, SecureTarError, tarfile.TarError) as err:
        LOGGER.warning("Error decrypting backup: %s", err)
        error = err
@@ -346,18 +333,15 @@ def decrypt_backup(


def _decrypt_backup(
    backup: AgentBackup,
    input_tar: tarfile.TarFile,
    output_tar: tarfile.TarFile,
    password: str | None,
) -> None:
    """Decrypt a backup."""
    expected_archives = _get_expected_archives(backup)
    for obj in input_tar:
        # We compare with PurePath to avoid issues with different path separators,
        # for example when backup.json is added as "./backup.json"
        object_path = PurePath(obj.name)
        if object_path == PurePath("backup.json"):
        if PurePath(obj.name) == PurePath("backup.json"):
            # Rewrite the backup.json file to indicate that the backup is decrypted
            if not (reader := input_tar.extractfile(obj)):
                raise DecryptError
@@ -368,13 +352,7 @@ def _decrypt_backup(
            metadata_obj.size = len(updated_metadata_b)
            output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
            continue
        prefix, _, suffix = object_path.name.partition(".")
        if suffix not in ("tar", "tgz", "tar.gz"):
            LOGGER.debug("Unknown file %s will not be decrypted", obj.name)
            output_tar.addfile(obj, input_tar.extractfile(obj))
            continue
        if prefix not in expected_archives:
            LOGGER.debug("Unknown inner tar file %s will not be decrypted", obj.name)
        if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
            output_tar.addfile(obj, input_tar.extractfile(obj))
            continue
        istf = SecureTarFile(
@@ -393,13 +371,12 @@ def _decrypt_backup(


def encrypt_backup(
    backup: AgentBackup,
    input_stream: IO[bytes],
    output_stream: IO[bytes],
    password: str | None,
    on_done: Callable[[Exception | None], None],
    minimum_size: int,
    nonces: NonceGenerator,
    nonces: list[bytes],
) -> None:
    """Encrypt a backup."""
    error: Exception | None = None
@@ -413,7 +390,7 @@ def encrypt_backup(
            fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
        ) as output_tar,
    ):
        _encrypt_backup(backup, input_tar, output_tar, password, nonces)
        _encrypt_backup(input_tar, output_tar, password, nonces)
    except (EncryptError, SecureTarError, tarfile.TarError) as err:
        LOGGER.warning("Error encrypting backup: %s", err)
        error = err
@@ -431,20 +408,17 @@ def encrypt_backup(


def _encrypt_backup(
    backup: AgentBackup,
    input_tar: tarfile.TarFile,
    output_tar: tarfile.TarFile,
    password: str | None,
    nonces: NonceGenerator,
    nonces: list[bytes],
) -> None:
    """Encrypt a backup."""
    inner_tar_idx = 0
    expected_archives = _get_expected_archives(backup)
    for obj in input_tar:
        # We compare with PurePath to avoid issues with different path separators,
        # for example when backup.json is added as "./backup.json"
        object_path = PurePath(obj.name)
        if object_path == PurePath("backup.json"):
        if PurePath(obj.name) == PurePath("backup.json"):
            # Rewrite the backup.json file to indicate that the backup is encrypted
            if not (reader := input_tar.extractfile(obj)):
                raise EncryptError
@@ -455,21 +429,16 @@ def _encrypt_backup(
            metadata_obj.size = len(updated_metadata_b)
            output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
            continue
        prefix, _, suffix = object_path.name.partition(".")
        if suffix not in ("tar", "tgz", "tar.gz"):
            LOGGER.debug("Unknown file %s will not be encrypted", obj.name)
        if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
            output_tar.addfile(obj, input_tar.extractfile(obj))
            continue
        if prefix not in expected_archives:
            LOGGER.debug("Unknown inner tar file %s will not be encrypted", obj.name)
            continue
        istf = SecureTarFile(
            None,  # Not used
            gzip=False,
            key=password_to_key(password) if password is not None else None,
            mode="r",
            fileobj=input_tar.extractfile(obj),
            nonce=nonces.get(inner_tar_idx),
            nonce=nonces[inner_tar_idx],
        )
        inner_tar_idx += 1
        with istf.encrypt(obj) as encrypted:
@@ -487,33 +456,17 @@ class _CipherWorkerStatus:
    writer: AsyncIteratorWriter


class NonceGenerator:
    """Generate nonces for encryption."""

    def __init__(self) -> None:
        """Initialize the generator."""
        self._nonces: dict[int, bytes] = {}

    def get(self, index: int) -> bytes:
        """Get a nonce for the given index."""
        if index not in self._nonces:
            # Generate a new nonce for the given index
            self._nonces[index] = os.urandom(16)
        return self._nonces[index]


class _CipherBackupStreamer:
    """Encrypt or decrypt a backup."""

    _cipher_func: Callable[
        [
            AgentBackup,
            IO[bytes],
            IO[bytes],
            str | None,
            Callable[[Exception | None], None],
            int,
            NonceGenerator,
            list[bytes],
        ],
        None,
    ]
@@ -531,7 +484,7 @@ class _CipherBackupStreamer:
        self._hass = hass
        self._open_stream = open_stream
        self._password = password
        self._nonces = NonceGenerator()
        self._nonces: list[bytes] = []

    def size(self) -> int:
        """Return the maximum size of the decrypted or encrypted backup."""
@@ -555,15 +508,7 @@ class _CipherBackupStreamer:
        writer = AsyncIteratorWriter(self._hass)
        worker = threading.Thread(
            target=self._cipher_func,
            args=[
                self._backup,
                reader,
                writer,
                self._password,
                on_done,
                self.size(),
                self._nonces,
            ],
            args=[reader, writer, self._password, on_done, self.size(), self._nonces],
        )
        worker_status = _CipherWorkerStatus(
            done=asyncio.Event(), reader=reader, thread=worker, writer=writer
@@ -593,6 +538,17 @@ class DecryptedBackupStreamer(_CipherBackupStreamer):
class EncryptedBackupStreamer(_CipherBackupStreamer):
    """Encrypt a backup."""

    def __init__(
        self,
        hass: HomeAssistant,
        backup: AgentBackup,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        password: str | None,
    ) -> None:
        """Initialize."""
        super().__init__(hass, backup, open_stream, password)
        self._nonces = [os.urandom(16) for _ in range(self._num_tar_files())]

    _cipher_func = staticmethod(encrypt_backup)

    def backup(self) -> AgentBackup:

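The NonceGenerator side of this diff keys nonces by inner-tar index instead of pre-building a list, which matters when the encryption stream is opened more than once (for example when an upload is retried): a second pass must see the same nonce per archive or the outputs diverge. A self-contained version of the class with that property made explicit:

import os


class NonceGenerator:
    """Generate one stable 16-byte nonce per inner-tar index."""

    def __init__(self) -> None:
        self._nonces: dict[int, bytes] = {}

    def get(self, index: int) -> bytes:
        if index not in self._nonces:
            self._nonces[index] = os.urandom(16)  # created lazily, then cached
        return self._nonces[index]


nonces = NonceGenerator()
first = nonces.get(0)
assert nonces.get(0) == first  # a second streaming pass reuses the same nonce
assert nonces.get(1) != first  # each inner tar gets its own nonce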
@@ -10,7 +10,11 @@ from homeassistant.helpers import config_validation as cv

from .config import Day, ScheduleRecurrence
from .const import DATA_MANAGER, LOGGER
from .manager import DecryptOnDowloadNotSupported, IncorrectPasswordError
from .manager import (
    DecryptOnDowloadNotSupported,
    IncorrectPasswordError,
    ManagerStateEvent,
)
from .models import BackupNotFound, Folder


@@ -30,6 +34,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
    websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
    websocket_api.async_register_command(hass, handle_delete)
    websocket_api.async_register_command(hass, handle_restore)
    websocket_api.async_register_command(hass, handle_subscribe_events)

    websocket_api.async_register_command(hass, handle_config_info)
    websocket_api.async_register_command(hass, handle_config_update)
@@ -346,28 +351,7 @@ async def handle_config_info(
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/config/update",
        vol.Optional("agents"): vol.Schema(
            {
                str: {
                    vol.Optional("protected"): bool,
                    vol.Optional("retention"): vol.Any(
                        vol.Schema(
                            {
                                # Note: We can't use cv.positive_int because it allows 0 even
                                # though 0 is not positive.
                                vol.Optional("copies"): vol.Any(
                                    vol.All(int, vol.Range(min=1)), None
                                ),
                                vol.Optional("days"): vol.Any(
                                    vol.All(int, vol.Range(min=1)), None
                                ),
                            },
                        ),
                        None,
                    ),
                }
            }
        ),
        vol.Optional("agents"): vol.Schema({str: {"protected": bool}}),
        vol.Optional("automatic_backups_configured"): bool,
        vol.Optional("create_backup"): vol.Schema(
            {
@@ -417,3 +401,22 @@ def handle_config_update(
    changes.pop("type")
    manager.config.update(**changes)
    connection.send_result(msg["id"])


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Subscribe to backup events."""

    def on_event(event: ManagerStateEvent) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], event))

    manager = hass.data[DATA_MANAGER]
    on_event(manager.last_event)
    connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
    connection.send_result(msg["id"])

@@ -12,5 +12,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/bluemaestro",
  "iot_class": "local_push",
  "requirements": ["bluemaestro-ble==0.4.1"]
  "requirements": ["bluemaestro-ble==0.2.3"]
}

@@ -18,9 +18,9 @@
    "bleak==0.22.3",
    "bleak-retry-connector==3.9.0",
    "bluetooth-adapters==0.21.4",
    "bluetooth-auto-recovery==1.5.1",
    "bluetooth-data-tools==1.28.1",
    "bluetooth-auto-recovery==1.4.5",
    "bluetooth-data-tools==1.27.0",
    "dbus-fast==2.43.0",
    "habluetooth==3.48.2"
    "habluetooth==3.39.0"
  ]
}

@@ -2,7 +2,7 @@

from __future__ import annotations

from habluetooth import (
from bluetooth_adapters import (
    DiscoveredDeviceAdvertisementData,
    DiscoveredDeviceAdvertisementDataDict,
    DiscoveryStorageType,

@@ -93,5 +93,3 @@ STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text"
|
||||
TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech"
|
||||
|
||||
LOGIN_MFA_TIMEOUT = 60
|
||||
|
||||
VOICE_STYLE_SEPERATOR = "||"
|
||||
|
||||
@@ -18,7 +18,7 @@ from aiohttp import web
|
||||
import attr
|
||||
from hass_nabucasa import AlreadyConnectedError, Cloud, auth, thingtalk
|
||||
from hass_nabucasa.const import STATE_DISCONNECTED
|
||||
from hass_nabucasa.voice_data import TTS_VOICES
|
||||
from hass_nabucasa.voice import TTS_VOICES
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
@@ -57,7 +57,6 @@ from .const import (
|
||||
PREF_REMOTE_ALLOW_REMOTE_ENABLE,
|
||||
PREF_TTS_DEFAULT_VOICE,
|
||||
REQUEST_TIMEOUT,
|
||||
VOICE_STYLE_SEPERATOR,
|
||||
)
|
||||
from .google_config import CLOUD_GOOGLE
|
||||
from .repairs import async_manage_legacy_subscription_issue
|
||||
@@ -592,21 +591,10 @@ async def websocket_subscription(
|
||||
def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]:
|
||||
"""Validate language and voice."""
|
||||
language, voice = value
|
||||
style: str | None
|
||||
voice, _, style = voice.partition(VOICE_STYLE_SEPERATOR)
|
||||
if not style:
|
||||
style = None
|
||||
if language not in TTS_VOICES:
|
||||
raise vol.Invalid(f"Invalid language {language}")
|
||||
if voice not in (language_info := TTS_VOICES[language]):
|
||||
if voice not in TTS_VOICES[language]:
|
||||
raise vol.Invalid(f"Invalid voice {voice} for language {language}")
|
||||
voice_info = language_info[voice]
|
||||
if style and (
|
||||
isinstance(voice_info, str) or style not in voice_info.get("variants", [])
|
||||
):
|
||||
raise vol.Invalid(
|
||||
f"Invalid style {style} for voice {voice} in language {language}"
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
@@ -1024,24 +1012,13 @@ def tts_info(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Fetch available tts info."""
|
||||
result = []
|
||||
for language, voices in TTS_VOICES.items():
|
||||
for voice_id, voice_info in voices.items():
|
||||
if isinstance(voice_info, str):
|
||||
result.append((language, voice_id, voice_info))
|
||||
continue
|
||||
|
||||
name = voice_info["name"]
|
||||
result.append((language, voice_id, name))
|
||||
result.extend(
|
||||
[
|
||||
(
|
||||
language,
|
||||
f"{voice_id}{VOICE_STYLE_SEPERATOR}{variant}",
|
||||
f"{name} ({variant})",
|
||||
)
|
||||
for variant in voice_info.get("variants", [])
|
||||
]
|
||||
)
|
||||
|
||||
connection.send_result(msg["id"], {"languages": result})
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"languages": [
|
||||
(language, voice)
|
||||
for language, voices in TTS_VOICES.items()
|
||||
for voice in voices
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==0.96.0"],
|
||||
"requirements": ["hass-nabucasa==0.94.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -6,8 +6,7 @@ import logging
from typing import Any

from hass_nabucasa import Cloud
from hass_nabucasa.voice import MAP_VOICE, AudioOutput, Gender, VoiceError
from hass_nabucasa.voice_data import TTS_VOICES
from hass_nabucasa.voice import MAP_VOICE, TTS_VOICES, AudioOutput, Gender, VoiceError
import voluptuous as vol

from homeassistant.components.tts import (

@@ -31,13 +30,7 @@ from homeassistant.setup import async_when_setup

from .assist_pipeline import async_migrate_cloud_pipeline_engine
from .client import CloudClient
from .const import (
    DATA_CLOUD,
    DATA_PLATFORMS_SETUP,
    DOMAIN,
    TTS_ENTITY_UNIQUE_ID,
    VOICE_STYLE_SEPERATOR,
)
from .const import DATA_CLOUD, DATA_PLATFORMS_SETUP, DOMAIN, TTS_ENTITY_UNIQUE_ID
from .prefs import CloudPreferences

ATTR_GENDER = "gender"

@@ -64,7 +57,6 @@ DEFAULT_VOICES = {
    "ar-SY": "AmanyNeural",
    "ar-TN": "ReemNeural",
    "ar-YE": "MaryamNeural",
    "as-IN": "PriyomNeural",
    "az-AZ": "BabekNeural",
    "bg-BG": "KalinaNeural",
    "bn-BD": "NabanitaNeural",

@@ -134,8 +126,6 @@ DEFAULT_VOICES = {
    "id-ID": "GadisNeural",
    "is-IS": "GudrunNeural",
    "it-IT": "ElsaNeural",
    "iu-Cans-CA": "SiqiniqNeural",
    "iu-Latn-CA": "SiqiniqNeural",
    "ja-JP": "NanamiNeural",
    "jv-ID": "SitiNeural",
    "ka-GE": "EkaNeural",

@@ -157,8 +147,6 @@ DEFAULT_VOICES = {
    "ne-NP": "HemkalaNeural",
    "nl-BE": "DenaNeural",
    "nl-NL": "ColetteNeural",
    "or-IN": "SubhasiniNeural",
    "pa-IN": "OjasNeural",
    "pl-PL": "AgnieszkaNeural",
    "ps-AF": "LatifaNeural",
    "pt-BR": "FranciscaNeural",

@@ -170,7 +158,6 @@ DEFAULT_VOICES = {
    "sl-SI": "PetraNeural",
    "so-SO": "UbaxNeural",
    "sq-AL": "AnilaNeural",
    "sr-Latn-RS": "NicholasNeural",
    "sr-RS": "SophieNeural",
    "su-ID": "TutiNeural",
    "sv-SE": "SofieNeural",

@@ -190,9 +177,12 @@ DEFAULT_VOICES = {
    "vi-VN": "HoaiMyNeural",
    "wuu-CN": "XiaotongNeural",
    "yue-CN": "XiaoMinNeural",
    "zh-CN-henan": "YundengNeural",
    "zh-CN-shandong": "YunxiangNeural",
    "zh-CN": "XiaoxiaoNeural",
    "zh-CN-henan": "YundengNeural",
    "zh-CN-liaoning": "XiaobeiNeural",
    "zh-CN-shaanxi": "XiaoniNeural",
    "zh-CN-shandong": "YunxiangNeural",
    "zh-CN-sichuan": "YunxiNeural",
    "zh-HK": "HiuMaanNeural",
    "zh-TW": "HsiaoChenNeural",
    "zu-ZA": "ThandoNeural",

@@ -201,39 +191,6 @@ DEFAULT_VOICES = {
_LOGGER = logging.getLogger(__name__)


@callback
def _prepare_voice_args(
    *,
    hass: HomeAssistant,
    language: str,
    voice: str,
    gender: str | None,
) -> dict:
    """Prepare voice arguments."""
    gender = handle_deprecated_gender(hass, gender)
    style: str | None
    original_voice, _, style = voice.partition(VOICE_STYLE_SEPERATOR)
    if not style:
        style = None
    updated_voice = handle_deprecated_voice(hass, original_voice)
    if updated_voice not in TTS_VOICES[language]:
        default_voice = DEFAULT_VOICES[language]
        _LOGGER.debug(
            "Unsupported voice %s detected, falling back to default %s for %s",
            voice,
            default_voice,
            language,
        )
        updated_voice = default_voice

    return {
        "language": language,
        "voice": updated_voice,
        "gender": gender,
        "style": style,
    }


def _deprecated_platform(value: str) -> str:
    """Validate if platform is deprecated."""
    if value == DOMAIN:
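Reviewer note: a sketch of the dict the new helper returns, assuming the handle_deprecated_* helpers pass their inputs through unchanged and that "JennyNeural" is a valid en-US voice (both assumptions, for illustration):

    _prepare_voice_args(
        hass=hass, language="en-US", voice="JennyNeural||cheerful", gender=None
    )
    # -> {"language": "en-US", "voice": "JennyNeural", "gender": None, "style": "cheerful"}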
@@ -371,61 +328,36 @@ class CloudTTSEntity(TextToSpeechEntity):
        """Return a list of supported voices for a language."""
        if not (voices := TTS_VOICES.get(language)):
            return None

        result = []

        for voice_id, voice_info in voices.items():
            if isinstance(voice_info, str):
                result.append(
                    Voice(
                        voice_id,
                        voice_info,
                    )
                )
                continue

            name = voice_info["name"]

            result.append(
                Voice(
                    voice_id,
                    name,
                )
            )
            result.extend(
                [
                    Voice(
                        f"{voice_id}{VOICE_STYLE_SEPERATOR}{variant}",
                        f"{name} ({variant})",
                    )
                    for variant in voice_info.get("variants", [])
                ]
            )

        return result
        return [Voice(voice, voice) for voice in voices]

    async def async_get_tts_audio(
        self, message: str, language: str, options: dict[str, Any]
    ) -> TtsAudioType:
        """Load TTS from Home Assistant Cloud."""
        gender: Gender | str | None = options.get(ATTR_GENDER)
        gender = handle_deprecated_gender(self.hass, gender)
        original_voice: str = options.get(
            ATTR_VOICE,
            self._voice if language == self._language else DEFAULT_VOICES[language],
        )
        voice = handle_deprecated_voice(self.hass, original_voice)
        if voice not in TTS_VOICES[language]:
            default_voice = DEFAULT_VOICES[language]
            _LOGGER.debug(
                "Unsupported voice %s detected, falling back to default %s for %s",
                voice,
                default_voice,
                language,
            )
            voice = default_voice
        # Process TTS
        try:
            data = await self.cloud.voice.process_tts(
                text=message,
                language=language,
                gender=gender,
                voice=voice,
                output=options[ATTR_AUDIO_OUTPUT],
                **_prepare_voice_args(
                    hass=self.hass,
                    language=language,
                    voice=options.get(
                        ATTR_VOICE,
                        (
                            self._voice
                            if language == self._language
                            else DEFAULT_VOICES[language]
                        ),
                    ),
                    gender=options.get(ATTR_GENDER),
                ),
            )
        except VoiceError as err:
            _LOGGER.error("Voice error: %s", err)

@@ -437,8 +369,6 @@ class CloudTTSEntity(TextToSpeechEntity):
class CloudProvider(Provider):
    """Home Assistant Cloud speech API provider."""

    has_entity = True

    def __init__(self, cloud: Cloud[CloudClient]) -> None:
        """Initialize cloud provider."""
        self.cloud = cloud

@@ -471,38 +401,7 @@ class CloudProvider(Provider):
        """Return a list of supported voices for a language."""
        if not (voices := TTS_VOICES.get(language)):
            return None

        result = []

        for voice_id, voice_info in voices.items():
            if isinstance(voice_info, str):
                result.append(
                    Voice(
                        voice_id,
                        voice_info,
                    )
                )
                continue

            name = voice_info["name"]

            result.append(
                Voice(
                    voice_id,
                    name,
                )
            )
            result.extend(
                [
                    Voice(
                        f"{voice_id}{VOICE_STYLE_SEPERATOR}{variant}",
                        f"{name} ({variant})",
                    )
                    for variant in voice_info.get("variants", [])
                ]
            )

        return result
        return [Voice(voice, voice) for voice in voices]

    @property
    def default_options(self) -> dict[str, str]:

@@ -516,22 +415,30 @@ class CloudProvider(Provider):
    ) -> TtsAudioType:
        """Load TTS from Home Assistant Cloud."""
        assert self.hass is not None
        gender: Gender | str | None = options.get(ATTR_GENDER)
        gender = handle_deprecated_gender(self.hass, gender)
        original_voice: str = options.get(
            ATTR_VOICE,
            self._voice if language == self._language else DEFAULT_VOICES[language],
        )
        voice = handle_deprecated_voice(self.hass, original_voice)
        if voice not in TTS_VOICES[language]:
            default_voice = DEFAULT_VOICES[language]
            _LOGGER.debug(
                "Unsupported voice %s detected, falling back to default %s for %s",
                voice,
                default_voice,
                language,
            )
            voice = default_voice
        # Process TTS
        try:
            data = await self.cloud.voice.process_tts(
                text=message,
                language=language,
                gender=gender,
                voice=voice,
                output=options[ATTR_AUDIO_OUTPUT],
                **_prepare_voice_args(
                    hass=self.hass,
                    language=language,
                    voice=options.get(
                        ATTR_VOICE,
                        self._voice
                        if language == self._language
                        else DEFAULT_VOICES[language],
                    ),
                    gender=options.get(ATTR_GENDER),
                ),
            )
        except VoiceError as err:
            _LOGGER.error("Voice error: %s", err)

@@ -12,7 +12,6 @@ from .coordinator import (
    ComelitSerialBridge,
    ComelitVedoSystem,
)
from .utils import async_client_session

BRIDGE_PLATFORMS = [
    Platform.CLIMATE,

@@ -33,9 +32,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> b
    """Set up Comelit platform."""

    coordinator: ComelitBaseCoordinator

    session = await async_client_session(hass)

    if entry.data.get(CONF_TYPE, BRIDGE) == BRIDGE:
        coordinator = ComelitSerialBridge(
            hass,

@@ -43,7 +39,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> b
            entry.data[CONF_HOST],
            entry.data.get(CONF_PORT, DEFAULT_PORT),
            entry.data[CONF_PIN],
            session,
        )
        platforms = BRIDGE_PLATFORMS
    else:

@@ -53,7 +48,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ComelitConfigEntry) -> b
            entry.data[CONF_HOST],
            entry.data.get(CONF_PORT, DEFAULT_PORT),
            entry.data[CONF_PIN],
            session,
        )
        platforms = VEDO_PLATFORMS

@@ -22,7 +22,6 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv

from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
from .utils import async_client_session

DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = 111111

@@ -48,14 +47,10 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
    """Validate the user input allows us to connect."""

    api: ComelitCommonApi

    session = await async_client_session(hass)
    if data.get(CONF_TYPE, BRIDGE) == BRIDGE:
        api = ComeliteSerialBridgeApi(
            data[CONF_HOST], data[CONF_PORT], data[CONF_PIN], session
        )
        api = ComeliteSerialBridgeApi(data[CONF_HOST], data[CONF_PORT], data[CONF_PIN])
    else:
        api = ComelitVedoApi(data[CONF_HOST], data[CONF_PORT], data[CONF_PIN], session)
        api = ComelitVedoApi(data[CONF_HOST], data[CONF_PORT], data[CONF_PIN])

    try:
        await api.login()

@@ -15,7 +15,6 @@ from aiocomelit.api import (
)
from aiocomelit.const import BRIDGE, VEDO
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
from aiohttp import ClientSession

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

@@ -96,16 +95,9 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
            await self.api.login()
            return await self._async_update_system_data()
        except (CannotConnect, CannotRetrieveData) as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_failed",
                translation_placeholders={"error": repr(err)},
            ) from err
            raise UpdateFailed(repr(err)) from err
        except CannotAuthenticate as err:
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN,
                translation_key="cannot_authenticate",
            ) from err
            raise ConfigEntryAuthFailed from err

    @abstractmethod
    async def _async_update_system_data(self) -> T:

@@ -127,10 +119,9 @@ class ComelitSerialBridge(
        host: str,
        port: int,
        pin: int,
        session: ClientSession,
    ) -> None:
        """Initialize the scanner."""
        self.api = ComeliteSerialBridgeApi(host, port, pin, session)
        self.api = ComeliteSerialBridgeApi(host, port, pin)
        super().__init__(hass, entry, BRIDGE, host)

    async def _async_update_system_data(

@@ -153,10 +144,9 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
        host: str,
        port: int,
        pin: int,
        session: ClientSession,
    ) -> None:
        """Initialize the scanner."""
        self.api = ComelitVedoApi(host, port, pin, session)
        self.api = ComelitVedoApi(host, port, pin)
        super().__init__(hass, entry, VEDO, host)

    async def _async_update_system_data(

@@ -8,5 +8,5 @@
  "iot_class": "local_polling",
  "loggers": ["aiocomelit"],
  "quality_scale": "bronze",
  "requirements": ["aiocomelit==0.12.0"]
  "requirements": ["aiocomelit==0.11.3"]
}

@@ -70,7 +70,9 @@ rules:
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: done
  exception-translations:
    status: todo
    comment: PR in progress
  icon-translations: done
  reconfiguration-flow:
    status: todo

@@ -84,5 +86,7 @@ rules:

  # Platinum
  async-dependency: done
  inject-websession: done
  inject-websession:
    status: todo
    comment: implement aiohttp_client.async_create_clientsession
  strict-typing: done

@@ -74,10 +74,7 @@
      "message": "Error connecting: {error}"
    },
    "cannot_authenticate": {
      "message": "Error authenticating"
    },
    "updated_failed": {
      "message": "Failed to update data: {error}"
      "message": "Error authenticating: {error}"
    }
  }
}

@@ -1,13 +0,0 @@
"""Utils for Comelit."""

from aiohttp import ClientSession, CookieJar

from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client


async def async_client_session(hass: HomeAssistant) -> ClientSession:
    """Return a new aiohttp session."""
    return aiohttp_client.async_create_clientsession(
        hass, verify_ssl=False, cookie_jar=CookieJar(unsafe=True)
    )
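Reviewer note: the deleted helper is a thin wrapper around aiohttp_client.async_create_clientsession; a condensed sketch of how the callers above consumed it (host, port and pin are placeholders):

    session = await async_client_session(hass)
    api = ComeliteSerialBridgeApi(host, port, pin, session)
    await api.login()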
@@ -56,10 +56,7 @@ from homeassistant.helpers import config_validation as cv, discovery
from homeassistant.helpers.entity_platform import async_get_platforms
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.trigger_template_entity import (
    CONF_AVAILABILITY,
    ValueTemplate,
)
from homeassistant.helpers.trigger_template_entity import CONF_AVAILABILITY
from homeassistant.helpers.typing import ConfigType

from .const import (

@@ -94,9 +91,7 @@ BINARY_SENSOR_SCHEMA = vol.Schema(
        vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
        vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
        vol.Optional(CONF_DEVICE_CLASS): BINARY_SENSOR_DEVICE_CLASSES_SCHEMA,
        vol.Optional(CONF_VALUE_TEMPLATE): vol.All(
            cv.template, ValueTemplate.from_template
        ),
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
        vol.Optional(CONF_COMMAND_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
        vol.Optional(CONF_UNIQUE_ID): cv.string,
        vol.Optional(

@@ -113,9 +108,7 @@ COVER_SCHEMA = vol.Schema(
        vol.Optional(CONF_COMMAND_STOP, default="true"): cv.string,
        vol.Required(CONF_NAME): cv.string,
        vol.Optional(CONF_ICON): cv.template,
        vol.Optional(CONF_VALUE_TEMPLATE): vol.All(
            cv.template, ValueTemplate.from_template
        ),
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
        vol.Optional(CONF_COMMAND_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
        vol.Optional(CONF_DEVICE_CLASS): COVER_DEVICE_CLASSES_SCHEMA,
        vol.Optional(CONF_UNIQUE_ID): cv.string,

@@ -141,9 +134,7 @@ SENSOR_SCHEMA = vol.Schema(
        vol.Optional(CONF_NAME, default=SENSOR_DEFAULT_NAME): cv.string,
        vol.Optional(CONF_ICON): cv.template,
        vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        vol.Optional(CONF_VALUE_TEMPLATE): vol.All(
            cv.template, ValueTemplate.from_template
        ),
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
        vol.Optional(CONF_UNIQUE_ID): cv.string,
        vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
        vol.Optional(CONF_STATE_CLASS): SENSOR_STATE_CLASSES_SCHEMA,

@@ -159,9 +150,7 @@ SWITCH_SCHEMA = vol.Schema(
        vol.Optional(CONF_COMMAND_ON, default="true"): cv.string,
        vol.Optional(CONF_COMMAND_STATE): cv.string,
        vol.Required(CONF_NAME): cv.string,
        vol.Optional(CONF_VALUE_TEMPLATE): vol.All(
            cv.template, ValueTemplate.from_template
        ),
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
        vol.Optional(CONF_ICON): cv.template,
        vol.Optional(CONF_COMMAND_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
        vol.Optional(CONF_UNIQUE_ID): cv.string,

@@ -18,10 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.template import Template
from homeassistant.helpers.trigger_template_entity import (
    ManualTriggerEntity,
    ValueTemplate,
)
from homeassistant.helpers.trigger_template_entity import ManualTriggerEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util

@@ -53,7 +50,7 @@ async def async_setup_platform(
    scan_interval: timedelta = binary_sensor_config.get(
        CONF_SCAN_INTERVAL, SCAN_INTERVAL
    )
    value_template: ValueTemplate | None = binary_sensor_config.get(CONF_VALUE_TEMPLATE)
    value_template: Template | None = binary_sensor_config.get(CONF_VALUE_TEMPLATE)

    data = CommandSensorData(hass, command, command_timeout)

@@ -89,7 +86,7 @@ class CommandBinarySensor(ManualTriggerEntity, BinarySensorEntity):
        config: ConfigType,
        payload_on: str,
        payload_off: str,
        value_template: ValueTemplate | None,
        value_template: Template | None,
        scan_interval: timedelta,
    ) -> None:
        """Initialize the Command line binary sensor."""

@@ -136,14 +133,9 @@ class CommandBinarySensor(ManualTriggerEntity, BinarySensorEntity):
        await self.data.async_update()
        value = self.data.value

        variables = self._template_variables_with_value(value)
        if not self._render_availability_template(variables):
            self.async_write_ha_state()
            return

        if self._value_template is not None:
            value = self._value_template.async_render_as_value_template(
                self.entity_id, variables, None
            value = self._value_template.async_render_with_possible_json_value(
                value, None
            )
        self._attr_is_on = None
        if value == self._payload_on:

@@ -151,7 +143,7 @@ class CommandBinarySensor(ManualTriggerEntity, BinarySensorEntity):
        elif value == self._payload_off:
            self._attr_is_on = False

        self._process_manual_data(variables)
        self._process_manual_data(value)
        self.async_write_ha_state()

    async def async_update(self) -> None:

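Reviewer note: the render-call migration this revert undoes, side by side; raw_value is a placeholder, and both method names are taken from the hunks above:

    # New style: render through shared template variables.
    variables = self._template_variables_with_value(raw_value)
    rendered = self._value_template.async_render_as_value_template(
        self.entity_id, variables, None
    )

    # Old style: render directly from the raw payload.
    rendered = self._value_template.async_render_with_possible_json_value(raw_value, None)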
@@ -20,10 +20,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.template import Template
from homeassistant.helpers.trigger_template_entity import (
    ManualTriggerEntity,
    ValueTemplate,
)
from homeassistant.helpers.trigger_template_entity import ManualTriggerEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util, slugify

@@ -82,7 +79,7 @@ class CommandCover(ManualTriggerEntity, CoverEntity):
        command_close: str,
        command_stop: str,
        command_state: str | None,
        value_template: ValueTemplate | None,
        value_template: Template | None,
        timeout: int,
        scan_interval: timedelta,
    ) -> None:

@@ -167,20 +164,14 @@ class CommandCover(ManualTriggerEntity, CoverEntity):
        """Update device state."""
        if self._command_state:
            payload = str(await self._async_query_state())

            variables = self._template_variables_with_value(payload)
            if not self._render_availability_template(variables):
                self.async_write_ha_state()
                return

            if self._value_template:
                payload = self._value_template.async_render_as_value_template(
                    self.entity_id, variables, None
                payload = self._value_template.async_render_with_possible_json_value(
                    payload, None
                )
            self._state = None
            if payload:
                self._state = int(payload)
            self._process_manual_data(variables)
            self._process_manual_data(payload)
            self.async_write_ha_state()

    async def async_update(self) -> None:

@@ -23,10 +23,7 @@ from homeassistant.exceptions import TemplateError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.template import Template
from homeassistant.helpers.trigger_template_entity import (
    ManualTriggerSensorEntity,
    ValueTemplate,
)
from homeassistant.helpers.trigger_template_entity import ManualTriggerSensorEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util

@@ -60,7 +57,7 @@ async def async_setup_platform(
    json_attributes: list[str] | None = sensor_config.get(CONF_JSON_ATTRIBUTES)
    json_attributes_path: str | None = sensor_config.get(CONF_JSON_ATTRIBUTES_PATH)
    scan_interval: timedelta = sensor_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
    value_template: ValueTemplate | None = sensor_config.get(CONF_VALUE_TEMPLATE)
    value_template: Template | None = sensor_config.get(CONF_VALUE_TEMPLATE)
    data = CommandSensorData(hass, command, command_timeout)

    trigger_entity_config = {

@@ -91,7 +88,7 @@ class CommandSensor(ManualTriggerSensorEntity):
        self,
        data: CommandSensorData,
        config: ConfigType,
        value_template: ValueTemplate | None,
        value_template: Template | None,
        json_attributes: list[str] | None,
        json_attributes_path: str | None,
        scan_interval: timedelta,

@@ -147,11 +144,6 @@ class CommandSensor(ManualTriggerSensorEntity):
        await self.data.async_update()
        value = self.data.value

        variables = self._template_variables_with_value(self.data.value)
        if not self._render_availability_template(variables):
            self.async_write_ha_state()
            return

        if self._json_attributes:
            self._attr_extra_state_attributes = {}
            if value:

@@ -176,17 +168,16 @@ class CommandSensor(ManualTriggerSensorEntity):
                    LOGGER.warning("Unable to parse output as JSON: %s", value)
            else:
                LOGGER.warning("Empty reply found when expecting JSON data")

        if self._value_template is None:
            self._attr_native_value = None
            self._process_manual_data(variables)
            self.async_write_ha_state()
            self._process_manual_data(value)
            return

        self._attr_native_value = None
        if self._value_template is not None and value is not None:
            value = self._value_template.async_render_as_value_template(
                self.entity_id, variables, None
            value = self._value_template.async_render_with_possible_json_value(
                value,
                None,
            )

        if self.device_class not in {

@@ -199,7 +190,7 @@ class CommandSensor(ManualTriggerSensorEntity):
                value, self.entity_id, self.device_class
            )

        self._process_manual_data(variables)
        self._process_manual_data(value)
        self.async_write_ha_state()

    async def async_update(self) -> None:

@@ -19,10 +19,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.template import Template
from homeassistant.helpers.trigger_template_entity import (
    ManualTriggerEntity,
    ValueTemplate,
)
from homeassistant.helpers.trigger_template_entity import ManualTriggerEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util, slugify

@@ -81,7 +78,7 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity):
        command_on: str,
        command_off: str,
        command_state: str | None,
        value_template: ValueTemplate | None,
        value_template: Template | None,
        timeout: int,
        scan_interval: timedelta,
    ) -> None:

@@ -169,21 +166,15 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity):
        """Update device state."""
        if self._command_state:
            payload = str(await self._async_query_state())

            variables = self._template_variables_with_value(payload)
            if not self._render_availability_template(variables):
                self.async_write_ha_state()
                return

            value = None
            if self._value_template:
                value = self._value_template.async_render_as_value_template(
                    self.entity_id, variables, None
                value = self._value_template.async_render_with_possible_json_value(
                    payload, None
                )
            self._attr_is_on = None
            if payload or value:
                self._attr_is_on = (value or payload).lower() == "true"
            self._process_manual_data(variables)
            self._process_manual_data(payload)
            self.async_write_ha_state()

    async def async_update(self) -> None:

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.5.7"]
  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.28"]
}

@@ -73,7 +73,7 @@
      "remote_moved_any_side": "Device moved with any side up",
      "remote_double_tap_any_side": "Device double tapped on any side",
      "remote_turned_clockwise": "Device turned clockwise",
      "remote_turned_counter_clockwise": "Device turned counterclockwise",
      "remote_turned_counter_clockwise": "Device turned counter clockwise",
      "remote_rotate_from_side_1": "Device rotated from \"side 1\" to \"{subtype}\"",
      "remote_rotate_from_side_2": "Device rotated from \"side 2\" to \"{subtype}\"",
      "remote_rotate_from_side_3": "Device rotated from \"side 3\" to \"{subtype}\"",

@@ -218,7 +218,7 @@ class TrackerEntity(

    entity_description: TrackerEntityDescription
    _attr_latitude: float | None = None
    _attr_location_accuracy: float = 0
    _attr_location_accuracy: int = 0
    _attr_location_name: str | None = None
    _attr_longitude: float | None = None
    _attr_source_type: SourceType = SourceType.GPS

@@ -234,7 +234,7 @@ class TrackerEntity(
        return not self.should_poll

    @cached_property
    def location_accuracy(self) -> float:
    def location_accuracy(self) -> int:
        """Return the location accuracy of the device.

        Value in meters.

@@ -8,6 +8,6 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["devolo_home_control_api"],
  "requirements": ["devolo-home-control-api==0.19.0"],
  "requirements": ["devolo-home-control-api==0.18.3"],
  "zeroconf": ["_dvl-deviceapi._tcp.local."]
}

@@ -7,7 +7,7 @@ import logging
from typing import Any

from devolo_plc_api.device import Device
from devolo_plc_api.exceptions.device import DeviceNotFound, DevicePasswordProtected
from devolo_plc_api.exceptions.device import DeviceNotFound
import voluptuous as vol

from homeassistant.components import zeroconf

@@ -22,9 +22,7 @@ from .const import DOMAIN, PRODUCT, SERIAL_NUMBER, TITLE

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
    {vol.Required(CONF_IP_ADDRESS): str, vol.Optional(CONF_PASSWORD): str}
)
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_IP_ADDRESS): str})
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_PASSWORD): str})


@@ -38,16 +36,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,

    device = Device(data[CONF_IP_ADDRESS], zeroconf_instance=zeroconf_instance)

    device.password = data[CONF_PASSWORD]

    await device.async_connect(session_instance=async_client)

    # Try a password protected, non-writing device API call that raises, if the password is wrong.
    # If only the plcnet API is available, we can continue without trying a password as the plcnet
    # API does not require a password.
    if device.device:
        await device.device.async_uptime()

    await device.async_disconnect()

    return {

@@ -70,22 +59,23 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN):
        """Handle the initial step."""
        errors: dict = {}

        if user_input is not None:
            try:
                info = await validate_input(self.hass, user_input)
            except DeviceNotFound:
                errors["base"] = "cannot_connect"
            except DevicePasswordProtected:
                errors["base"] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                await self.async_set_unique_id(
                    info[SERIAL_NUMBER], raise_on_progress=False
                )
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title=info[TITLE], data=user_input)
        if user_input is None:
            return self.async_show_form(
                step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
            )

        try:
            info = await validate_input(self.hass, user_input)
        except DeviceNotFound:
            errors["base"] = "cannot_connect"
        except Exception:
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            await self.async_set_unique_id(info[SERIAL_NUMBER], raise_on_progress=False)
            self._abort_if_unique_id_configured()
            user_input[CONF_PASSWORD] = ""
            return self.async_create_entry(title=info[TITLE], data=user_input)

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors

@@ -116,27 +106,15 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN):
    ) -> ConfigFlowResult:
        """Handle a flow initiated by zeroconf."""
        title = self.context["title_placeholders"][CONF_NAME]
        errors: dict = {}
        data_schema: vol.Schema | None = None

        if user_input is not None:
            data = {
                CONF_IP_ADDRESS: self.host,
                CONF_PASSWORD: user_input.get(CONF_PASSWORD, ""),
                CONF_PASSWORD: "",
            }
            try:
                await validate_input(self.hass, data)
            except DevicePasswordProtected:
                errors = {"base": "invalid_auth"}
                data_schema = STEP_REAUTH_DATA_SCHEMA
            else:
                return self.async_create_entry(title=title, data=data)

            return self.async_create_entry(title=title, data=data)
        return self.async_show_form(
            step_id="zeroconf_confirm",
            data_schema=data_schema,
            description_placeholders={"host_name": title},
            errors=errors,
        )

    async def async_step_reauth(

@@ -156,21 +134,14 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN):
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initiated by reauthentication."""
        errors: dict = {}
        if user_input is not None:
            data = {
                CONF_IP_ADDRESS: self.host,
                CONF_PASSWORD: user_input[CONF_PASSWORD],
            }
            try:
                await validate_input(self.hass, data)
            except DevicePasswordProtected:
                errors = {"base": "invalid_auth"}
            else:
                return self.async_update_reload_and_abort(self._reauth_entry, data=data)
        if user_input is None:
            return self.async_show_form(
                step_id="reauth_confirm",
                data_schema=STEP_REAUTH_DATA_SCHEMA,
            )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=STEP_REAUTH_DATA_SCHEMA,
            errors=errors,
        )
        data = {
            CONF_IP_ADDRESS: self.host,
            CONF_PASSWORD: user_input[CONF_PASSWORD],
        }
        return self.async_update_reload_and_abort(self._reauth_entry, data=data)

@@ -138,7 +138,7 @@ async def async_setup_entry(
            SENSOR_TYPES[CONNECTED_PLC_DEVICES],
        )
    )
    network: LogicalNetwork = coordinators[CONNECTED_PLC_DEVICES].data
    network = await device.plcnet.async_get_network_overview()
    peers = [
        peer.mac_address for peer in network.devices if peer.topology == REMOTE
    ]

@@ -5,12 +5,10 @@
    "user": {
      "description": "[%key:common::config_flow::description::confirm_setup%]",
      "data": {
        "ip_address": "[%key:common::config_flow::data::ip%]",
        "password": "[%key:common::config_flow::data::password%]"
        "ip_address": "[%key:common::config_flow::data::ip%]"
      },
      "data_description": {
        "ip_address": "IP address of your devolo Home Network device. This can be found in the devolo Home Network App on the device dashboard.",
        "password": "Password you protected the device with."
        "ip_address": "IP address of your devolo Home Network device. This can be found in the devolo Home Network App on the device dashboard."
      }
    },
    "reauth_confirm": {

@@ -18,23 +16,16 @@
        "password": "[%key:common::config_flow::data::password%]"
      },
      "data_description": {
        "password": "[%key:component::devolo_home_network::config::step::user::data_description::password%]"
        "password": "Password you protected the device with."
      }
    },
    "zeroconf_confirm": {
      "description": "Do you want to add the devolo home network device with the hostname `{host_name}` to Home Assistant?",
      "title": "Discovered devolo home network device",
      "data": {
        "password": "[%key:common::config_flow::data::password%]"
      },
      "data_description": {
        "password": "[%key:component::devolo_home_network::config::step::user::data_description::password%]"
      }
      "title": "Discovered devolo home network device"
    }
  },
  "error": {
    "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
    "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
    "unknown": "[%key:common::config_flow::error::unknown%]"
  },
  "abort": {

@@ -2,7 +2,7 @@
  "config": {
    "step": {
      "user": {
        "title": "Set up the Dialogflow webhook",
        "title": "Set up the Dialogflow Webhook",
        "description": "Are you sure you want to set up Dialogflow?"
      }
    },

@@ -12,7 +12,7 @@
      "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]"
    },
    "create_entry": {
      "default": "To send events to Home Assistant, you will need to set up the [webhook service of Dialogflow]({dialogflow_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) for further details."
      "default": "To send events to Home Assistant, you will need to set up [webhook integration of Dialogflow]({dialogflow_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/json\n\nSee [the documentation]({docs_url}) for further details."
    }
  }
}

@@ -68,7 +68,7 @@ async def async_validate_hostname(
    result = False
    with contextlib.suppress(DNSError):
        result = bool(
            await aiodns.DNSResolver(  # type: ignore[call-overload]
            await aiodns.DNSResolver(
                nameservers=[resolver], udp_port=port, tcp_port=port
            ).query(hostname, qtype)
        )

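Reviewer note: for reference, a minimal aiodns query matching the call pattern above (nameserver and hostname are placeholders):

    import aiodns

    resolver = aiodns.DNSResolver(nameservers=["1.1.1.1"], udp_port=53, tcp_port=53)
    result = await resolver.query("example.com", "A")  # raises DNSError on failure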
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/dnsip",
  "iot_class": "cloud_polling",
  "requirements": ["aiodns==3.4.0"]
  "requirements": ["aiodns==3.2.0"]
}

@@ -106,7 +106,7 @@ class WanIpSensor(SensorEntity):
    async def async_update(self) -> None:
        """Get the current DNS IP address for hostname."""
        try:
            response = await self.resolver.query(self.hostname, self.querytype)  # type: ignore[call-overload]
            response = await self.resolver.query(self.hostname, self.querytype)
        except DNSError as err:
            _LOGGER.warning("Exception while resolving host: %s", err)
            response = None

@@ -5,7 +5,7 @@ from dataclasses import dataclass
from typing import Generic

from deebot_client.capabilities import CapabilityEvent
from deebot_client.events.water_info import MopAttachedEvent
from deebot_client.events.water_info import WaterInfoEvent

from homeassistant.components.binary_sensor import (
    BinarySensorEntity,

@@ -32,9 +32,9 @@ class EcovacsBinarySensorEntityDescription(


ENTITY_DESCRIPTIONS: tuple[EcovacsBinarySensorEntityDescription, ...] = (
    EcovacsBinarySensorEntityDescription[MopAttachedEvent](
        capability_fn=lambda caps: caps.water.mop_attached if caps.water else None,
        value_fn=lambda e: e.value,
    EcovacsBinarySensorEntityDescription[WaterInfoEvent](
        capability_fn=lambda caps: caps.water,
        value_fn=lambda e: e.mop_attached,
        key="water_mop_attached",
        translation_key="water_mop_attached",
        entity_category=EntityCategory.DIAGNOSTIC,

@@ -1,11 +1,8 @@
"""Ecovacs image entities."""

from typing import cast

from deebot_client.capabilities import CapabilityMap
from deebot_client.device import Device
from deebot_client.events.map import CachedMapInfoEvent, MapChangedEvent
from deebot_client.map import Map

from homeassistant.components.image import ImageEntity
from homeassistant.core import HomeAssistant

@@ -50,7 +47,6 @@ class EcovacsMap(
        """Initialize entity."""
        super().__init__(device, capability, hass=hass)
        self._attr_extra_state_attributes = {}
        self._map = cast(Map, self._device.map)

        entity_description = EntityDescription(
            key="map",

@@ -59,7 +55,7 @@
    def image(self) -> bytes | None:
        """Return bytes of image or None."""
        if svg := self._map.get_svg_map():
        if svg := self._device.map.get_svg_map():
            return svg.encode()

        return None

@@ -84,4 +80,4 @@
        Only used by the generic entity update service.
        """
        await super().async_update()
        self._map.refresh()
        self._device.map.refresh()

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/ecovacs",
  "iot_class": "cloud_push",
  "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
  "requirements": ["py-sucks==0.9.10", "deebot-client==13.1.0"]
  "requirements": ["py-sucks==0.9.10", "deebot-client==12.5.0"]
}

@@ -6,8 +6,7 @@ from typing import Any, Generic

from deebot_client.capabilities import CapabilitySetTypes
from deebot_client.device import Device
from deebot_client.events import WorkModeEvent
from deebot_client.events.water_info import WaterAmountEvent
from deebot_client.events import WaterInfoEvent, WorkModeEvent

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory

@@ -32,9 +31,9 @@ class EcovacsSelectEntityDescription(


ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
    EcovacsSelectEntityDescription[WaterAmountEvent](
        capability_fn=lambda caps: caps.water.amount if caps.water else None,
        current_option_fn=lambda e: get_name_key(e.value),
    EcovacsSelectEntityDescription[WaterInfoEvent](
        capability_fn=lambda caps: caps.water,
        current_option_fn=lambda e: get_name_key(e.amount),
        options_fn=lambda water: [get_name_key(amount) for amount in water.types],
        key="water_amount",
        translation_key="water_amount",

@@ -9,14 +9,7 @@ from homeassistant.helpers.device_registry import DeviceEntry
from .const import DOMAIN
from .coordinator import EheimDigitalConfigEntry, EheimDigitalUpdateCoordinator

PLATFORMS = [
    Platform.CLIMATE,
    Platform.LIGHT,
    Platform.NUMBER,
    Platform.SENSOR,
    Platform.SWITCH,
    Platform.TIME,
]
PLATFORMS = [Platform.CLIMATE, Platform.LIGHT, Platform.NUMBER, Platform.SENSOR]


async def async_setup_entry(

@@ -30,22 +30,6 @@
          "no_error": "mdi:check-circle"
        }
      }
    },
    "switch": {
      "filter_active": {
        "default": "mdi:pump",
        "state": {
          "off": "mdi:pump-off"
        }
      }
    },
    "time": {
      "day_start_time": {
        "default": "mdi:weather-sunny"
      },
      "night_start_time": {
        "default": "mdi:moon-waning-crescent"
      }
    }
  }
}

@@ -79,14 +79,6 @@
          "air_in_filter": "Air in filter"
        }
      }
    },
    "time": {
      "day_start_time": {
        "name": "Day start time"
      },
      "night_start_time": {
        "name": "Night start time"
      }
    }
  }
}

@@ -1,70 +0,0 @@
"""EHEIM Digital switches."""

from typing import Any, override

from eheimdigital.classic_vario import EheimDigitalClassicVario
from eheimdigital.device import EheimDigitalDevice

from homeassistant.components.switch import SwitchEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import EheimDigitalConfigEntry, EheimDigitalUpdateCoordinator
from .entity import EheimDigitalEntity

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
    entry: EheimDigitalConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the callbacks for the coordinator so switches can be added as devices are found."""
    coordinator = entry.runtime_data

    def async_setup_device_entities(
        device_address: dict[str, EheimDigitalDevice],
    ) -> None:
        """Set up the switch entities for one or multiple devices."""
        entities: list[SwitchEntity] = []
        for device in device_address.values():
            if isinstance(device, EheimDigitalClassicVario):
                entities.append(EheimDigitalClassicVarioSwitch(coordinator, device))  # noqa: PERF401

        async_add_entities(entities)

    coordinator.add_platform_callback(async_setup_device_entities)
    async_setup_device_entities(coordinator.hub.devices)


class EheimDigitalClassicVarioSwitch(
    EheimDigitalEntity[EheimDigitalClassicVario], SwitchEntity
):
    """Represent an EHEIM Digital classicVARIO switch entity."""

    _attr_translation_key = "filter_active"
    _attr_name = None

    def __init__(
        self,
        coordinator: EheimDigitalUpdateCoordinator,
        device: EheimDigitalClassicVario,
    ) -> None:
        """Initialize an EHEIM Digital classicVARIO switch entity."""
        super().__init__(coordinator, device)
        self._attr_unique_id = device.mac_address
        self._async_update_attrs()

    @override
    async def async_turn_off(self, **kwargs: Any) -> None:
        await self._device.set_active(active=False)

    @override
    async def async_turn_on(self, **kwargs: Any) -> None:
        await self._device.set_active(active=True)

    @override
    def _async_update_attrs(self) -> None:
        self._attr_is_on = self._device.is_active

@@ -1,132 +0,0 @@
"""EHEIM Digital time entities."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from datetime import time
from typing import Generic, TypeVar, final, override

from eheimdigital.classic_vario import EheimDigitalClassicVario
from eheimdigital.device import EheimDigitalDevice
from eheimdigital.heater import EheimDigitalHeater

from homeassistant.components.time import TimeEntity, TimeEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import EheimDigitalConfigEntry, EheimDigitalUpdateCoordinator
from .entity import EheimDigitalEntity

PARALLEL_UPDATES = 0

_DeviceT_co = TypeVar("_DeviceT_co", bound=EheimDigitalDevice, covariant=True)


@dataclass(frozen=True, kw_only=True)
class EheimDigitalTimeDescription(TimeEntityDescription, Generic[_DeviceT_co]):
    """Class describing EHEIM Digital time entities."""

    value_fn: Callable[[_DeviceT_co], time | None]
    set_value_fn: Callable[[_DeviceT_co, time], Awaitable[None]]


CLASSICVARIO_DESCRIPTIONS: tuple[
    EheimDigitalTimeDescription[EheimDigitalClassicVario], ...
] = (
    EheimDigitalTimeDescription[EheimDigitalClassicVario](
        key="day_start_time",
        translation_key="day_start_time",
        entity_category=EntityCategory.CONFIG,
        value_fn=lambda device: device.day_start_time,
        set_value_fn=lambda device, value: device.set_day_start_time(value),
    ),
    EheimDigitalTimeDescription[EheimDigitalClassicVario](
        key="night_start_time",
        translation_key="night_start_time",
        entity_category=EntityCategory.CONFIG,
        value_fn=lambda device: device.night_start_time,
        set_value_fn=lambda device, value: device.set_night_start_time(value),
    ),
)

HEATER_DESCRIPTIONS: tuple[EheimDigitalTimeDescription[EheimDigitalHeater], ...] = (
    EheimDigitalTimeDescription[EheimDigitalHeater](
        key="day_start_time",
        translation_key="day_start_time",
        entity_category=EntityCategory.CONFIG,
        value_fn=lambda device: device.day_start_time,
        set_value_fn=lambda device, value: device.set_day_start_time(value),
    ),
    EheimDigitalTimeDescription[EheimDigitalHeater](
        key="night_start_time",
        translation_key="night_start_time",
        entity_category=EntityCategory.CONFIG,
        value_fn=lambda device: device.night_start_time,
        set_value_fn=lambda device, value: device.set_night_start_time(value),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: EheimDigitalConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the callbacks for the coordinator so times can be added as devices are found."""
    coordinator = entry.runtime_data

    def async_setup_device_entities(
        device_address: dict[str, EheimDigitalDevice],
    ) -> None:
        """Set up the time entities for one or multiple devices."""
        entities: list[EheimDigitalTime[EheimDigitalDevice]] = []
        for device in device_address.values():
            if isinstance(device, EheimDigitalClassicVario):
                entities.extend(
                    EheimDigitalTime[EheimDigitalClassicVario](
                        coordinator, device, description
                    )
                    for description in CLASSICVARIO_DESCRIPTIONS
                )
            if isinstance(device, EheimDigitalHeater):
                entities.extend(
                    EheimDigitalTime[EheimDigitalHeater](
                        coordinator, device, description
                    )
                    for description in HEATER_DESCRIPTIONS
                )

        async_add_entities(entities)

    coordinator.add_platform_callback(async_setup_device_entities)
    async_setup_device_entities(coordinator.hub.devices)


@final
class EheimDigitalTime(
    EheimDigitalEntity[_DeviceT_co], TimeEntity, Generic[_DeviceT_co]
):
    """Represent an EHEIM Digital time entity."""

    entity_description: EheimDigitalTimeDescription[_DeviceT_co]

    def __init__(
        self,
        coordinator: EheimDigitalUpdateCoordinator,
        device: _DeviceT_co,
        description: EheimDigitalTimeDescription[_DeviceT_co],
    ) -> None:
        """Initialize an EHEIM Digital time entity."""
        super().__init__(coordinator, device)
        self.entity_description = description
        self._attr_unique_id = f"{device.mac_address}_{description.key}"

    @override
    async def async_set_value(self, value: time) -> None:
        """Change the time."""
        return await self.entity_description.set_value_fn(self._device, value)

    @override
    def _async_update_attrs(self) -> None:
        """Update the entity attributes."""
        self._attr_native_value = self.entity_description.value_fn(self._device)
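Reviewer note: a condensed sketch of how the removed time entity wires a description's callables; all names come from the removed module above, and device is a placeholder classicVARIO instance (datetime.time is imported by that module):

    description = CLASSICVARIO_DESCRIPTIONS[0]           # key="day_start_time"
    current = description.value_fn(device)               # -> datetime.time | None
    await description.set_value_fn(device, time(8, 0))   # delegates to device.set_day_start_time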
@@ -9,14 +9,12 @@ import logging
from typing import Any

from pyenphase import Envoy, EnvoyError, EnvoyTokenAuth
from pyenphase.models.home import EnvoyInterfaceInformation

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util

@@ -28,7 +26,7 @@ TOKEN_REFRESH_CHECK_INTERVAL = timedelta(days=1)
STALE_TOKEN_THRESHOLD = timedelta(days=30).total_seconds()
NOTIFICATION_ID = "enphase_envoy_notification"
FIRMWARE_REFRESH_INTERVAL = timedelta(hours=4)
MAC_VERIFICATION_DELAY = timedelta(seconds=34)

_LOGGER = logging.getLogger(__name__)


@@ -41,7 +39,6 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    envoy_serial_number: str
    envoy_firmware: str
    config_entry: EnphaseConfigEntry
    interface: EnvoyInterfaceInformation | None

    def __init__(
        self, hass: HomeAssistant, envoy: Envoy, entry: EnphaseConfigEntry

@@ -53,10 +50,8 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
        self.password = entry_data[CONF_PASSWORD]
        self._setup_complete = False
        self.envoy_firmware = ""
        self.interface = None
        self._cancel_token_refresh: CALLBACK_TYPE | None = None
        self._cancel_firmware_refresh: CALLBACK_TYPE | None = None
        self._cancel_mac_verification: CALLBACK_TYPE | None = None
        super().__init__(
            hass,
            _LOGGER,

@@ -126,66 +121,6 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
            self.hass.config_entries.async_reload(self.config_entry.entry_id)
        )

    def _schedule_mac_verification(
        self, delay: timedelta = MAC_VERIFICATION_DELAY
    ) -> None:
        """Schedule one time job to verify envoy mac address."""
        self.async_cancel_mac_verification()
        self._cancel_mac_verification = async_call_later(
            self.hass,
            delay,
            self._async_verify_mac,
        )

    @callback
    def _async_verify_mac(self, now: datetime.datetime) -> None:
        """Verify Envoy active interface mac address in background."""
        self.hass.async_create_background_task(
            self._async_fetch_and_compare_mac(), "{name} verify envoy mac address"
        )

    async def _async_fetch_and_compare_mac(self) -> None:
        """Get Envoy interface information and update mac in device connections."""
        interface: (
            EnvoyInterfaceInformation | None
        ) = await self.envoy.interface_settings()
        if interface is None:
            _LOGGER.debug("%s: interface information returned None", self.name)
            return
        # remember interface information so diagnostics can include in report
        self.interface = interface

        # Add to or update device registry connections as needed
        device_registry = dr.async_get(self.hass)
        envoy_device = device_registry.async_get_device(
            identifiers={
                (
                    DOMAIN,
                    self.envoy_serial_number,
                )
            }
        )
        if envoy_device is None:
            _LOGGER.error(
                "No envoy device found in device registry: %s %s",
                DOMAIN,
                self.envoy_serial_number,
            )
            return

        connection = (dr.CONNECTION_NETWORK_MAC, interface.mac)
        if connection in envoy_device.connections:
            _LOGGER.debug(
                "connection verified as existing: %s in %s", connection, self.name
            )
            return

        device_registry.async_update_device(
            device_id=envoy_device.id,
            new_connections={connection},
        )
        _LOGGER.debug("added connection: %s to %s", connection, self.name)

    @callback
    def _async_mark_setup_complete(self) -> None:
        """Mark setup as complete and setup firmware checks and token refresh if needed."""

@@ -197,7 +132,6 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
            FIRMWARE_REFRESH_INTERVAL,
            cancel_on_shutdown=True,
        )
        self._schedule_mac_verification()
        self.async_cancel_token_refresh()
        if not isinstance(self.envoy.auth, EnvoyTokenAuth):
            return

@@ -318,10 +252,3 @@ class EnphaseUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
        if self._cancel_firmware_refresh:
            self._cancel_firmware_refresh()
            self._cancel_firmware_refresh = None

    @callback
    def async_cancel_mac_verification(self) -> None:
        """Cancel mac verification."""
        if self._cancel_mac_verification:
            self._cancel_mac_verification()
            self._cancel_mac_verification = None

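Reviewer note: the scheduling pattern removed above, in miniature. async_call_later returns a cancel callback, which the coordinator stored so the job could be cancelled on teardown (names condensed from the hunks above):

    from homeassistant.helpers.event import async_call_later

    cancel = async_call_later(hass, MAC_VERIFICATION_DELAY, coordinator._async_verify_mac)
    # later, e.g. when tearing down:
    cancel()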
@@ -3,7 +3,6 @@
from __future__ import annotations

import copy
from datetime import datetime
from typing import TYPE_CHECKING, Any

from attr import asdict
@@ -64,7 +63,6 @@ async def _get_fixture_collection(envoy: Envoy, serial: str) -> dict[str, Any]:
        "/ivp/ensemble/generator",
        "/ivp/meters",
        "/ivp/meters/readings",
        "/home",
    ]

    for end_point in end_points:
@@ -148,25 +146,11 @@ async def async_get_config_entry_diagnostics(
        "inverters": envoy_data.inverters,
        "tariff": envoy_data.tariff,
    }
    # Add Envoy active interface information to the report
    active_interface: dict[str, Any] = {}
    if coordinator.interface:
        active_interface = {
            "name": (interface := coordinator.interface).primary_interface,
            "interface type": interface.interface_type,
            "mac": interface.mac,
            "uses dhcp": interface.dhcp,
            "firmware build date": datetime.fromtimestamp(
                interface.software_build_epoch
            ).strftime("%Y-%m-%d %H:%M:%S"),
            "envoy timezone": interface.timezone,
        }

    envoy_properties: dict[str, Any] = {
        "envoy_firmware": envoy.firmware,
        "part_number": envoy.part_number,
        "envoy_model": envoy.envoy_model,
        "active interface": active_interface,
        "supported_features": [feature.name for feature in envoy.supported_features],
        "phase_mode": envoy.phase_mode,
        "phase_count": envoy.phase_count,
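To illustrate what the active_interface block above contributes to the diagnostics report, here is the epoch-to-string conversion and the resulting key set in isolation; every value below is made up for illustration:

from datetime import datetime

# Made-up epoch; fromtimestamp() renders it in the server's local timezone.
build_date = datetime.fromtimestamp(1714497600).strftime("%Y-%m-%d %H:%M:%S")

# Example shape of the report entry (keys from the hunk, values illustrative):
active_interface = {
    "name": "eth0",
    "interface type": "ethernet",
    "mac": "00:11:22:33:44:55",
    "uses dhcp": True,
    "firmware build date": build_date,
    "envoy timezone": "Europe/Amsterdam",
}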
@@ -6,8 +6,8 @@
  "documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
  "iot_class": "local_polling",
  "loggers": ["pyenphase"],
  "quality_scale": "platinum",
  "requirements": ["pyenphase==1.26.0"],
  "quality_scale": "bronze",
  "requirements": ["pyenphase==1.25.5"],
  "zeroconf": [
    {
      "type": "_enphase-envoy._tcp.local."
@@ -128,7 +128,7 @@
        "storage_mode": {
          "name": "Storage mode",
          "state": {
            "self_consumption": "Self-consumption",
            "self_consumption": "Self consumption",
            "backup": "Full backup",
            "savings": "Savings mode"
          }
@@ -393,7 +393,7 @@
    },
    "exceptions": {
      "unexpected_device": {
        "message": "Unexpected Envoy serial number found at {host}; expected {expected_serial}, found {actual_serial}"
        "message": "Unexpected Envoy serial-number found at {host}; expected {expected_serial}, found {actual_serial}"
      },
      "authentication_error": {
        "message": "Envoy authentication failure on {host}: {args}"
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/environment_canada",
  "iot_class": "cloud_polling",
  "loggers": ["env_canada"],
  "requirements": ["env-canada==0.10.2"]
  "requirements": ["env-canada==0.10.1"]
}
@@ -86,7 +86,7 @@
      "name": "AQHI"
    },
    "advisories": {
      "name": "Advisories"
      "name": "Advisory"
    },
    "endings": {
      "name": "Endings"
@@ -94,7 +94,6 @@ class EphEmberThermostat(ClimateEntity):
        self._ember = ember
        self._zone_name = zone_name(zone)
        self._zone = zone
        self._attr_unique_id = zone["zoneid"]

        # hot water = true: an immersion device without target temperature control.
        self._hot_water = zone_is_hotwater(zone)
@@ -22,5 +22,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["eq3btsmart"],
  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.15.1"]
  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.13.1"]
}
@@ -42,7 +42,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from .entity import EsphomeAssistEntity, convert_api_error_ha_error
from .entry_data import ESPHomeConfigEntry
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
from .enum_mapper import EsphomeEnumMapper
from .ffmpeg_proxy import async_create_proxy_url

@@ -96,7 +96,7 @@ async def async_setup_entry(
    if entry_data.device_info.voice_assistant_feature_flags_compat(
        entry_data.api_version
    ):
        async_add_entities([EsphomeAssistSatellite(entry)])
        async_add_entities([EsphomeAssistSatellite(entry, entry_data)])


class EsphomeAssistSatellite(
@@ -108,12 +108,17 @@ class EsphomeAssistSatellite(
        key="assist_satellite", translation_key="assist_satellite"
    )

    def __init__(self, entry: ESPHomeConfigEntry) -> None:
    def __init__(
        self,
        config_entry: ESPHomeConfigEntry,
        entry_data: RuntimeEntryData,
    ) -> None:
        """Initialize satellite."""
        super().__init__(entry.runtime_data)
        super().__init__(entry_data)

        self.config_entry = entry
        self.cli = self._entry_data.client
        self.config_entry = config_entry
        self.entry_data = entry_data
        self.cli = self.entry_data.client

        self._is_running: bool = True
        self._pipeline_task: asyncio.Task | None = None
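The constructor change in this hunk swaps derivation for injection. Reduced to toy classes (the names below are illustrative, not the integration's), the two styles compare like this:

class SatelliteDerived:
    """Resolve runtime data from the entry inside the constructor."""

    def __init__(self, entry) -> None:
        self._entry_data = entry.runtime_data  # coupled to ConfigEntry internals


class SatelliteInjected:
    """Receive runtime data explicitly; simpler to construct in tests."""

    def __init__(self, config_entry, entry_data) -> None:
        self.config_entry = config_entry
        self.entry_data = entry_data  # no reliance on entry.runtime_data

Explicit injection also lets the caller, async_setup_entry above, reuse the entry_data it has already validated.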
@@ -129,23 +134,23 @@ class EsphomeAssistSatellite(
    @property
    def pipeline_entity_id(self) -> str | None:
        """Return the entity ID of the pipeline to use for the next conversation."""
        assert self._entry_data.device_info is not None
        assert self.entry_data.device_info is not None
        ent_reg = er.async_get(self.hass)
        return ent_reg.async_get_entity_id(
            Platform.SELECT,
            DOMAIN,
            f"{self._entry_data.device_info.mac_address}-pipeline",
            f"{self.entry_data.device_info.mac_address}-pipeline",
        )

    @property
    def vad_sensitivity_entity_id(self) -> str | None:
        """Return the entity ID of the VAD sensitivity to use for the next conversation."""
        assert self._entry_data.device_info is not None
        assert self.entry_data.device_info is not None
        ent_reg = er.async_get(self.hass)
        return ent_reg.async_get_entity_id(
            Platform.SELECT,
            DOMAIN,
            f"{self._entry_data.device_info.mac_address}-vad_sensitivity",
            f"{self.entry_data.device_info.mac_address}-vad_sensitivity",
        )

    @callback
@@ -191,16 +196,16 @@ class EsphomeAssistSatellite(
        _LOGGER.debug("Received satellite configuration: %s", self._satellite_config)

        # Inform listeners that config has been updated
        self._entry_data.async_assist_satellite_config_updated(self._satellite_config)
        self.entry_data.async_assist_satellite_config_updated(self._satellite_config)

    async def async_added_to_hass(self) -> None:
        """Run when entity is about to be added to hass."""
        await super().async_added_to_hass()

        assert self._entry_data.device_info is not None
        assert self.entry_data.device_info is not None
        feature_flags = (
            self._entry_data.device_info.voice_assistant_feature_flags_compat(
                self._entry_data.api_version
            self.entry_data.device_info.voice_assistant_feature_flags_compat(
                self.entry_data.api_version
            )
        )
        if feature_flags & VoiceAssistantFeature.API_AUDIO:
@@ -256,7 +261,7 @@ class EsphomeAssistSatellite(

        # Update wake word select when config is updated
        self.async_on_remove(
            self._entry_data.async_register_assist_satellite_set_wake_word_callback(
            self.entry_data.async_register_assist_satellite_set_wake_word_callback(
                self.async_set_wake_word
            )
        )
@@ -278,7 +283,7 @@ class EsphomeAssistSatellite(

        data_to_send: dict[str, Any] = {}
        if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_START:
            self._entry_data.async_set_assist_pipeline_state(True)
            self.entry_data.async_set_assist_pipeline_state(True)
        elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END:
            assert event.data is not None
            data_to_send = {"text": event.data["stt_output"]["text"]}
@@ -300,10 +305,10 @@ class EsphomeAssistSatellite(
                url = async_process_play_media_url(self.hass, path)
                data_to_send = {"url": url}

            assert self._entry_data.device_info is not None
            assert self.entry_data.device_info is not None
            feature_flags = (
                self._entry_data.device_info.voice_assistant_feature_flags_compat(
                    self._entry_data.api_version
                self.entry_data.device_info.voice_assistant_feature_flags_compat(
                    self.entry_data.api_version
                )
            )
            if feature_flags & VoiceAssistantFeature.SPEAKER and (
@@ -339,7 +344,7 @@ class EsphomeAssistSatellite(
        elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END:
            if self._tts_streaming_task is None:
                # No TTS
                self._entry_data.async_set_assist_pipeline_state(False)
                self.entry_data.async_set_assist_pipeline_state(False)

        self.cli.send_voice_assistant_event(event_type, data_to_send)

@@ -381,7 +386,7 @@ class EsphomeAssistSatellite(
            # Route media through the proxy
            format_to_use: MediaPlayerSupportedFormat | None = None
            for supported_format in chain(
                *self._entry_data.media_player_formats.values()
                *self.entry_data.media_player_formats.values()
            ):
                if supported_format.purpose == MediaPlayerFormatPurpose.ANNOUNCEMENT:
                    format_to_use = supported_format
@@ -439,10 +444,10 @@ class EsphomeAssistSatellite(

        # API or UDP output audio
        port: int = 0
        assert self._entry_data.device_info is not None
        assert self.entry_data.device_info is not None
        feature_flags = (
            self._entry_data.device_info.voice_assistant_feature_flags_compat(
                self._entry_data.api_version
            self.entry_data.device_info.voice_assistant_feature_flags_compat(
                self.entry_data.api_version
            )
        )
        if (feature_flags & VoiceAssistantFeature.SPEAKER) and not (
@@ -543,7 +548,7 @@ class EsphomeAssistSatellite(

    def _update_tts_format(self) -> None:
        """Update the TTS format from the first media player."""
        for supported_format in chain(*self._entry_data.media_player_formats.values()):
        for supported_format in chain(*self.entry_data.media_player_formats.values()):
            # Find first announcement format
            if supported_format.purpose == MediaPlayerFormatPurpose.ANNOUNCEMENT:
                self._attr_tts_options = {
@@ -629,7 +634,7 @@ class EsphomeAssistSatellite(

        # State change
        self.tts_response_finished()
        self._entry_data.async_set_assist_pipeline_state(False)
        self.entry_data.async_set_assist_pipeline_state(False)

    async def _wrap_audio_stream(self) -> AsyncIterable[bytes]:
        """Yield audio chunks from the queue until None."""
@@ -2,22 +2,50 @@

from __future__ import annotations

from functools import partial
from typing import TYPE_CHECKING

from aioesphomeapi import BinarySensorInfo, BinarySensorState, EntityInfo

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.core import callback
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.enum import try_parse_enum

from .entity import EsphomeEntity, platform_async_setup_entry
from .const import DOMAIN
from .entity import EsphomeAssistEntity, EsphomeEntity, platform_async_setup_entry
from .entry_data import ESPHomeConfigEntry

PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ESPHomeConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up ESPHome binary sensors based on a config entry."""
    await platform_async_setup_entry(
        hass,
        entry,
        async_add_entities,
        info_type=BinarySensorInfo,
        entity_type=EsphomeBinarySensor,
        state_type=BinarySensorState,
    )

    entry_data = entry.runtime_data
    assert entry_data.device_info is not None
    if entry_data.device_info.voice_assistant_feature_flags_compat(
        entry_data.api_version
    ):
        async_add_entities([EsphomeAssistInProgressBinarySensor(entry_data)])


class EsphomeBinarySensor(
    EsphomeEntity[BinarySensorInfo, BinarySensorState], BinarySensorEntity
):
@@ -48,9 +76,50 @@ class EsphomeBinarySensor(
        return self._static_info.is_status_binary_sensor or super().available


async_setup_entry = partial(
    platform_async_setup_entry,
    info_type=BinarySensorInfo,
    entity_type=EsphomeBinarySensor,
    state_type=BinarySensorState,
)
class EsphomeAssistInProgressBinarySensor(EsphomeAssistEntity, BinarySensorEntity):
    """A binary sensor implementation for ESPHome for use with assist_pipeline."""

    entity_description = BinarySensorEntityDescription(
        entity_registry_enabled_default=False,
        key="assist_in_progress",
        translation_key="assist_in_progress",
    )

    async def async_added_to_hass(self) -> None:
        """Create issue."""
        await super().async_added_to_hass()
        if TYPE_CHECKING:
            assert self.registry_entry is not None
        ir.async_create_issue(
            self.hass,
            DOMAIN,
            f"assist_in_progress_deprecated_{self.registry_entry.id}",
            breaks_in_ha_version="2025.4",
            data={
                "entity_id": self.entity_id,
                "entity_uuid": self.registry_entry.id,
                "integration_name": "ESPHome",
            },
            is_fixable=True,
            severity=ir.IssueSeverity.WARNING,
            translation_key="assist_in_progress_deprecated",
            translation_placeholders={
                "integration_name": "ESPHome",
            },
        )

    async def async_will_remove_from_hass(self) -> None:
        """Remove issue."""
        await super().async_will_remove_from_hass()
        if TYPE_CHECKING:
            assert self.registry_entry is not None
        ir.async_delete_issue(
            self.hass,
            DOMAIN,
            f"assist_in_progress_deprecated_{self.registry_entry.id}",
        )

    @property
    def is_on(self) -> bool | None:
        """Return true if the binary sensor is on."""
        return self._entry_data.assist_pipeline_state
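The re-added sensor above pairs issue creation on add with deletion on removal. A minimal sketch of that repairs-registry lifecycle, using a placeholder domain string ("my_domain") rather than the integration's constant; the keyword arguments mirror the calls in the hunk:

from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

def create_deprecation_issue(hass: HomeAssistant, entity_id: str, entity_uuid: str) -> None:
    """Raise a repairs issue keyed to the entity's registry id."""
    ir.async_create_issue(
        hass,
        "my_domain",
        f"assist_in_progress_deprecated_{entity_uuid}",
        breaks_in_ha_version="2025.4",
        data={"entity_id": entity_id, "entity_uuid": entity_uuid},
        is_fixable=True,
        severity=ir.IssueSeverity.WARNING,
        translation_key="assist_in_progress_deprecated",
    )

def delete_deprecation_issue(hass: HomeAssistant, entity_uuid: str) -> None:
    """Remove the issue again when the entity leaves hass."""
    ir.async_delete_issue(
        hass, "my_domain", f"assist_in_progress_deprecated_{entity_uuid}"
    )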
@@ -22,7 +22,6 @@ import voluptuous as vol

from homeassistant.components import zeroconf
from homeassistant.config_entries import (
    SOURCE_IGNORE,
    SOURCE_REAUTH,
    SOURCE_RECONFIGURE,
    ConfigEntry,
@@ -32,7 +31,6 @@ from homeassistant.config_entries import (
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
from homeassistant.core import callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
@@ -179,7 +177,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle a flow initialized by a reconfigure request."""
        self._reconfig_entry = self._get_reconfigure_entry()
@@ -304,15 +302,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
            )
        ):
            return
        if entry.source == SOURCE_IGNORE:
            # Don't call _fetch_device_info() for ignored entries
            raise AbortFlow("already_configured")
        configured_host: str | None = entry.data.get(CONF_HOST)
        configured_port: int | None = entry.data.get(CONF_PORT)
        if configured_host == host and configured_port == port:
            # Don't probe to verify the mac is correct since
            # the host and port match.
            raise AbortFlow("already_configured")
        configured_psk: str | None = entry.data.get(CONF_NOISE_PSK)
        await self._fetch_device_info(host, port or configured_port, configured_psk)
        updates: dict[str, Any] = {}
@@ -333,9 +323,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
        ):
            return
        assert conflict_entry.unique_id is not None
        if self.source == SOURCE_RECONFIGURE:
            error = "reconfigure_already_configured"
        elif updates:
        if updates:
            error = "already_configured_updates"
        else:
            error = "already_configured_detailed"
@@ -5,38 +5,43 @@ from __future__ import annotations

from datetime import timedelta
import logging

import aiohttp
from awesomeversion import AwesomeVersion
from esphome_dashboard_api import ConfiguredDevice, ESPHomeDashboardAPI

from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

MIN_VERSION_SUPPORTS_UPDATE = AwesomeVersion("2023.1.0")
REFRESH_INTERVAL = timedelta(minutes=5)


class ESPHomeDashboardCoordinator(DataUpdateCoordinator[dict[str, ConfiguredDevice]]):
    """Class to interact with the ESPHome dashboard."""

    def __init__(self, hass: HomeAssistant, addon_slug: str, url: str) -> None:
        """Initialize the dashboard coordinator."""
    def __init__(
        self,
        hass: HomeAssistant,
        addon_slug: str,
        url: str,
        session: aiohttp.ClientSession,
    ) -> None:
        """Initialize."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=None,
            name="ESPHome Dashboard",
            update_interval=REFRESH_INTERVAL,
            update_interval=timedelta(minutes=5),
            always_update=False,
        )
        self.addon_slug = addon_slug
        self.url = url
        self.api = ESPHomeDashboardAPI(url, async_get_clientsession(hass))
        self.api = ESPHomeDashboardAPI(url, session)
        self.supports_update: bool | None = None

    async def _async_update_data(self) -> dict[str, ConfiguredDevice]:
    async def _async_update_data(self) -> dict:
        """Fetch device data."""
        devices = await self.api.get_devices()
        configured_devices = devices["configured"]
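The coordinator change above moves session acquisition out of the constructor, so a test can pass a mock aiohttp session instead of patching async_get_clientsession. A condensed, self-contained sketch of the injected-session variant; the class name is illustrative and the final dict comprehension is a plausible completion of the truncated _async_update_data, not a verbatim copy:

# Sketch of the injected-session coordinator pattern from the hunk above.
import logging
from datetime import timedelta

import aiohttp
from esphome_dashboard_api import ESPHomeDashboardAPI

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

class DashboardCoordinator(DataUpdateCoordinator[dict]):
    def __init__(
        self, hass: HomeAssistant, url: str, session: aiohttp.ClientSession
    ) -> None:
        super().__init__(
            hass,
            _LOGGER,
            config_entry=None,
            name="ESPHome Dashboard",
            update_interval=timedelta(minutes=5),
            always_update=False,
        )
        self.api = ESPHomeDashboardAPI(url, session)  # session injected by caller

    async def _async_update_data(self) -> dict:
        devices = await self.api.get_devices()
        # Index configured devices by name (assumed completion of the hunk).
        return {dev["name"]: dev for dev in devices["configured"]}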
@@ -9,6 +9,7 @@ from typing import Any

from homeassistant.config_entries import SOURCE_REAUTH
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.storage import Store
@@ -103,7 +104,9 @@ class ESPHomeDashboardManager:
        self._cancel_shutdown = None
        self._current_dashboard = None

        dashboard = ESPHomeDashboardCoordinator(hass, addon_slug, url)
        dashboard = ESPHomeDashboardCoordinator(
            hass, addon_slug, url, async_get_clientsession(hass)
        )
        await dashboard.async_request_refresh()

        self._current_dashboard = dashboard
@@ -10,18 +10,10 @@ from homeassistant.const import CONF_PASSWORD
from homeassistant.core import HomeAssistant

from . import CONF_NOISE_PSK
from .const import CONF_DEVICE_NAME
from .dashboard import async_get_dashboard
from .entry_data import ESPHomeConfigEntry

REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, "mac_address", "bluetooth_mac_address"}
CONFIGURED_DEVICE_KEYS = (
    "configuration",
    "current_version",
    "deployed_version",
    "loaded_integrations",
    "target_platform",
)


async def async_get_config_entry_diagnostics(
@@ -34,9 +26,6 @@ async def async_get_config_entry_diagnostics(

    entry_data = config_entry.runtime_data
    device_info = entry_data.device_info
    device_name: str | None = (
        device_info.name if device_info else config_entry.data.get(CONF_DEVICE_NAME)
    )

    if (storage_data := await entry_data.store.async_load()) is not None:
        diag["storage_data"] = storage_data
@@ -56,19 +45,7 @@ async def async_get_config_entry_diagnostics(
        "scanner": await scanner.async_diagnostics(),
    }

    diag_dashboard: dict[str, Any] = {"configured": False}
    diag["dashboard"] = diag_dashboard
    if dashboard := async_get_dashboard(hass):
        diag_dashboard["configured"] = True
        diag_dashboard["supports_update"] = dashboard.supports_update
        diag_dashboard["last_update_success"] = dashboard.last_update_success
        diag_dashboard["last_exception"] = dashboard.last_exception
        diag_dashboard["addon"] = dashboard.addon_slug
        if device_name and dashboard.data:
            diag_dashboard["has_matching_name"] = device_name in dashboard.data
            if data := dashboard.data.get(device_name):
                diag_dashboard["device"] = {
                    key: data.get(key) for key in CONFIGURED_DEVICE_KEYS
                }
    diag["dashboard"] = dashboard.addon_slug

    return async_redact_data(diag, REDACT_KEYS)
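The final line above funnels the whole report through async_redact_data, which replaces matching keys at any depth of the nested dict. A quick illustration with made-up input (expected output shown as a comment):

from homeassistant.components.diagnostics import async_redact_data

diag = {
    "config": {"noise_psk": "super-secret", "host": "10.0.0.2"},
    "mac_address": "AA:BB:CC:DD:EE:FF",
}
print(async_redact_data(diag, {"noise_psk", "mac_address"}))
# {'config': {'noise_psk': '**REDACTED**', 'host': '10.0.0.2'},
#  'mac_address': '**REDACTED**'}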
@@ -109,7 +109,7 @@ def _mired_to_kelvin(mired_temperature: float) -> int:
def _color_mode_to_ha(mode: int) -> str:
    """Convert an esphome color mode to a HA color mode constant.

    Choose the color mode that best matches the feature-set.
    Chose the color mode that best matches the feature-set.
    """
    candidates = []
    for ha_mode, cap_lists in _COLOR_MODE_MAPPING.items():
@@ -49,7 +49,6 @@ from homeassistant.helpers import (
    config_validation as cv,
    device_registry as dr,
    entity_registry as er,
    issue_registry as ir,
    template,
)
from homeassistant.helpers.device_registry import format_mac
@@ -655,30 +654,6 @@ class ESPHomeManager:
        ):
            self._async_subscribe_logs(new_log_level)

    @callback
    def _async_cleanup(self) -> None:
        """Clean up stale issues and entities."""
        assert self.entry_data.device_info is not None
        ent_reg = er.async_get(self.hass)
        # Clean up the stale assist_in_progress entity and issue;
        # remove this after 2026.4
        if not (
            stale_entry_entity_id := ent_reg.async_get_entity_id(
                Platform.BINARY_SENSOR,
                DOMAIN,
                f"{self.entry_data.device_info.mac_address}-assist_in_progress",
            )
        ):
            return
        stale_entry = ent_reg.async_get(stale_entry_entity_id)
        assert stale_entry is not None
        ent_reg.async_remove(stale_entry_entity_id)
        issue_reg = ir.async_get(self.hass)
        if issue := issue_reg.async_get_issue(
            DOMAIN, f"assist_in_progress_deprecated_{stale_entry.id}"
        ):
            issue_reg.async_delete(DOMAIN, issue.issue_id)

    async def async_start(self) -> None:
        """Start the esphome connection manager."""
        hass = self.hass
@@ -721,7 +696,6 @@ class ESPHomeManager:
        _setup_services(hass, entry_data, services)

        if (device_info := entry_data.device_info) is not None:
            self._async_cleanup()
            if device_info.name:
                reconnect_logic.name = device_info.name
            if (
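The _async_cleanup body above couples an entity-registry removal to the deletion of its companion repairs issue. Reduced to a standalone helper with placeholder domain and unique-id values (the registry calls are the same ones the hunk uses):

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er, issue_registry as ir

@callback
def remove_stale_binary_sensor(hass: HomeAssistant, unique_id: str) -> None:
    """Drop a stale binary_sensor entity and its linked repairs issue."""
    ent_reg = er.async_get(hass)
    entity_id = ent_reg.async_get_entity_id(
        Platform.BINARY_SENSOR, "my_domain", unique_id
    )
    if entity_id is None:
        return  # nothing stale to clean up
    entry = ent_reg.async_get(entity_id)
    assert entry is not None
    ent_reg.async_remove(entity_id)
    issue_reg = ir.async_get(hass)
    if issue := issue_reg.async_get_issue(
        "my_domain", f"assist_in_progress_deprecated_{entry.id}"
    ):
        issue_reg.async_delete("my_domain", issue.issue_id)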
Some files were not shown because too many files have changed in this diff.