Compare commits

1 commit

Author  SHA1        Message               Date
Erik    01bfc09072  Remove backup helper  2025-04-24 12:20:59 +02:00
3514 changed files with 51182 additions and 171826 deletions


@@ -32,7 +32,7 @@ jobs:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -116,7 +116,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -175,7 +175,7 @@ jobs:
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: translations
@@ -457,12 +457,12 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: translations
@@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:
- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0
uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}


@@ -37,10 +37,10 @@ on:
type: boolean
env:
CACHE_VERSION: 2
CACHE_VERSION: 12
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.6"
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2025.5"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -249,7 +249,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -259,7 +259,7 @@ jobs:
with:
path: venv
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
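
The same key rewrite repeats across the cache steps in this workflow: the first line of each pair scopes the key by runner architecture, the second drops that component. A minimal Python sketch of the two key shapes, with illustrative stand-ins for the workflow expressions (the values below are assumptions, not taken from a real run):

# Illustrative stand-ins for the GitHub Actions expressions in these hunks.
runner_os = "Linux"                # ${{ runner.os }}
runner_arch = "X64"                # ${{ runner.arch }}
python_version = "3.13.3"          # ${{ steps.python.outputs.python-version }}
pre_commit_cache_key = "<key>"     # ${{ needs.info.outputs.pre-commit_cache_key }}

# Key shape on the first line of each pair (scoped per architecture):
with_arch = f"{runner_os}-{runner_arch}-{python_version}-venv-{pre_commit_cache_key}"
# Key shape on the second line (shared across architectures):
without_arch = f"{runner_os}-{python_version}-venv-{pre_commit_cache_key}"
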
@@ -276,7 +276,7 @@ jobs:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Install pre-commit dependencies
if: steps.cache-precommit.outputs.cache-hit != 'true'
@@ -294,7 +294,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -306,7 +306,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
@@ -315,7 +315,7 @@ jobs:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Run ruff-format
run: |
@@ -334,7 +334,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -346,7 +346,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
@@ -355,7 +355,7 @@ jobs:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Run ruff
run: |
@@ -374,7 +374,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -386,7 +386,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
@@ -395,7 +395,7 @@ jobs:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Register yamllint problem matcher
@@ -484,7 +484,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -501,7 +501,7 @@ jobs:
with:
path: venv
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
@@ -509,10 +509,10 @@ jobs:
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
steps.generate-uv-key.outputs.key }}
restore-keys: |
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-uv-${{
env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
env.HA_SHORT_VERSION }}-
- name: Install additional OS dependencies
@@ -587,7 +587,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -598,7 +598,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Run hassfest
run: |
@@ -620,7 +620,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -631,7 +631,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Run gen_requirements_all.py
run: |
@@ -653,7 +653,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Dependency review
uses: actions/dependency-review-action@v4.7.1
uses: actions/dependency-review-action@v4.6.0
with:
license-check: false # We use our own license audit checks
@@ -677,7 +677,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -688,7 +688,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Extract license data
run: |
@@ -720,7 +720,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -731,7 +731,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Register pylint problem matcher
run: |
@@ -767,7 +767,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -778,7 +778,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Register pylint problem matcher
run: |
@@ -812,7 +812,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -830,17 +830,17 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v4.2.3
with:
path: .mypy_cache
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
steps.generate-mypy-key.outputs.key }}
restore-keys: |
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-mypy-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-mypy-${{
env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}-${{
env.HA_SHORT_VERSION }}-
- name: Register mypy problem matcher
@@ -889,7 +889,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -900,7 +900,7 @@ jobs:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Run split_tests.py
run: |
@@ -944,13 +944,12 @@ jobs:
bluez \
ffmpeg \
libturbojpeg \
libgammu-dev \
libxml2-utils
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -960,8 +959,7 @@ jobs:
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Register Python problem matcher
run: |
@@ -970,7 +968,7 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: pytest_buckets
- name: Compile English translations
@@ -1021,12 +1019,6 @@ jobs:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Beautify test results
# For easier identification of parsing errors
if: needs.info.outputs.skip_coverage != 'true'
run: |
xmllint --format "junit.xml" > "junit.xml-tmp"
mv "junit.xml-tmp" "junit.xml"
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
@@ -1077,13 +1069,12 @@ jobs:
bluez \
ffmpeg \
libturbojpeg \
libmariadb-dev-compat \
libxml2-utils
libmariadb-dev-compat
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -1093,8 +1084,7 @@ jobs:
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Register Python problem matcher
run: |
@@ -1162,12 +1152,6 @@ jobs:
steps.pytest-partial.outputs.mariadb }}
path: coverage.xml
overwrite: true
- name: Beautify test results
# For easier identification of parsing errors
if: needs.info.outputs.skip_coverage != 'true'
run: |
xmllint --format "junit.xml" > "junit.xml-tmp"
mv "junit.xml-tmp" "junit.xml"
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
@@ -1216,8 +1200,7 @@ jobs:
sudo apt-get -y install \
bluez \
ffmpeg \
libturbojpeg \
libxml2-utils
libturbojpeg
sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
sudo apt-get -y install \
postgresql-server-dev-14
@@ -1225,7 +1208,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -1235,8 +1218,7 @@ jobs:
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Register Python problem matcher
run: |
@@ -1305,12 +1287,6 @@ jobs:
steps.pytest-partial.outputs.postgresql }}
path: coverage.xml
overwrite: true
- name: Beautify test results
# For easier identification of parsing errors
if: needs.info.outputs.skip_coverage != 'true'
run: |
xmllint --format "junit.xml" > "junit.xml-tmp"
mv "junit.xml-tmp" "junit.xml"
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
@@ -1336,12 +1312,12 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v5.4.3
uses: codecov/codecov-action@v5.4.2
with:
fail_ci_if_error: true
flags: full-suite
@@ -1378,13 +1354,12 @@ jobs:
bluez \
ffmpeg \
libturbojpeg \
libgammu-dev \
libxml2-utils
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -1394,8 +1369,7 @@ jobs:
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Register Python problem matcher
run: |
@@ -1458,12 +1432,6 @@ jobs:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
overwrite: true
- name: Beautify test results
# For easier identification of parsing errors
if: needs.info.outputs.skip_coverage != 'true'
run: |
xmllint --format "junit.xml" > "junit.xml-tmp"
mv "junit.xml-tmp" "junit.xml"
- name: Upload test results artifact
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
uses: actions/upload-artifact@v4.6.2
@@ -1486,12 +1454,12 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v5.4.3
uses: codecov/codecov-action@v5.4.2
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
@@ -1511,7 +1479,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
pattern: test-results-*
- name: Upload test results to Codecov


@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.28.18
uses: github/codeql-action/init@v3.28.15
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.28.18
uses: github/codeql-action/analyze@v3.28.15
with:
category: "/language:python"


@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}


@@ -36,7 +36,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
uses: actions/setup-python@v5.5.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -138,17 +138,17 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: requirements_diff
@@ -187,22 +187,22 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Download env_file
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v4.2.1
with:
name: requirements_all_wheels


@@ -66,7 +66,6 @@ homeassistant.components.alarm_control_panel.*
homeassistant.components.alert.*
homeassistant.components.alexa.*
homeassistant.components.alpha_vantage.*
homeassistant.components.amazon_devices.*
homeassistant.components.amazon_polly.*
homeassistant.components.amberelectric.*
homeassistant.components.ambient_network.*
@@ -271,7 +270,6 @@ homeassistant.components.image_processing.*
homeassistant.components.image_upload.*
homeassistant.components.imap.*
homeassistant.components.imgw_pib.*
homeassistant.components.immich.*
homeassistant.components.incomfort.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
@@ -334,7 +332,6 @@ homeassistant.components.media_player.*
homeassistant.components.media_source.*
homeassistant.components.met_eireann.*
homeassistant.components.metoffice.*
homeassistant.components.miele.*
homeassistant.components.mikrotik.*
homeassistant.components.min_max.*
homeassistant.components.minecraft_server.*
@@ -387,10 +384,8 @@ homeassistant.components.overseerr.*
homeassistant.components.p1_monitor.*
homeassistant.components.pandora.*
homeassistant.components.panel_custom.*
homeassistant.components.paperless_ngx.*
homeassistant.components.peblar.*
homeassistant.components.peco.*
homeassistant.components.pegel_online.*
homeassistant.components.persistent_notification.*
homeassistant.components.person.*
homeassistant.components.pi_hole.*
@@ -437,6 +432,7 @@ homeassistant.components.roku.*
homeassistant.components.romy.*
homeassistant.components.rpi_power.*
homeassistant.components.rss_feed_template.*
homeassistant.components.rtsp_to_webrtc.*
homeassistant.components.russound_rio.*
homeassistant.components.ruuvi_gateway.*
homeassistant.components.ruuvitag_ble.*
@@ -466,7 +462,6 @@ homeassistant.components.slack.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*
homeassistant.components.smtp.*
homeassistant.components.snooz.*
homeassistant.components.solarlog.*
homeassistant.components.sonarr.*

CODEOWNERS (generated)

@@ -46,8 +46,8 @@ build.json @home-assistant/supervisor
/tests/components/accuweather/ @bieniu
/homeassistant/components/acmeda/ @atmurray
/tests/components/acmeda/ @atmurray
/homeassistant/components/adax/ @danielhiversen @lazytarget
/tests/components/adax/ @danielhiversen @lazytarget
/homeassistant/components/adax/ @danielhiversen
/tests/components/adax/ @danielhiversen
/homeassistant/components/adguard/ @frenck
/tests/components/adguard/ @frenck
/homeassistant/components/ads/ @mrpasztoradam
@@ -89,8 +89,6 @@ build.json @home-assistant/supervisor
/tests/components/alert/ @home-assistant/core @frenck
/homeassistant/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/tests/components/alexa/ @home-assistant/cloud @ochlocracy @jbouwh
/homeassistant/components/amazon_devices/ @chemelli74
/tests/components/amazon_devices/ @chemelli74
/homeassistant/components/amazon_polly/ @jschlyter
/homeassistant/components/amberelectric/ @madpilot
/tests/components/amberelectric/ @madpilot
@@ -173,8 +171,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/avea/ @pattyland
/homeassistant/components/awair/ @ahayworth @danielsjf
/tests/components/awair/ @ahayworth @danielsjf
/homeassistant/components/aws_s3/ @tomasbedrich
/tests/components/aws_s3/ @tomasbedrich
/homeassistant/components/axis/ @Kane610
/tests/components/axis/ @Kane610
/homeassistant/components/azure_data_explorer/ @kaareseras
@@ -204,8 +200,8 @@ build.json @home-assistant/supervisor
/tests/components/blebox/ @bbx-a @swistakm
/homeassistant/components/blink/ @fronzbot @mkmer
/tests/components/blink/ @fronzbot @mkmer
/homeassistant/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/tests/components/blue_current/ @gleeuwen @NickKoepr @jtodorova23
/homeassistant/components/blue_current/ @Floris272 @gleeuwen
/tests/components/blue_current/ @Floris272 @gleeuwen
/homeassistant/components/bluemaestro/ @bdraco
/tests/components/bluemaestro/ @bdraco
/homeassistant/components/blueprint/ @home-assistant/core
@@ -305,7 +301,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/crownstone/ @Crownstone @RicArch97
/tests/components/crownstone/ @Crownstone @RicArch97
/homeassistant/components/cups/ @fabaff
/tests/components/cups/ @fabaff
/homeassistant/components/daikin/ @fredrike
/tests/components/daikin/ @fredrike
/homeassistant/components/date/ @home-assistant/core
@@ -458,8 +453,8 @@ build.json @home-assistant/supervisor
/tests/components/evil_genius_labs/ @balloob
/homeassistant/components/evohome/ @zxdavb
/tests/components/evohome/ @zxdavb
/homeassistant/components/ezviz/ @RenierM26
/tests/components/ezviz/ @RenierM26
/homeassistant/components/ezviz/ @RenierM26 @baqs
/tests/components/ezviz/ @RenierM26 @baqs
/homeassistant/components/faa_delays/ @ntilley905
/tests/components/faa_delays/ @ntilley905
/homeassistant/components/fan/ @home-assistant/core
@@ -713,8 +708,6 @@ build.json @home-assistant/supervisor
/tests/components/imeon_inverter/ @Imeon-Energy
/homeassistant/components/imgw_pib/ @bieniu
/tests/components/imgw_pib/ @bieniu
/homeassistant/components/immich/ @mib1185
/tests/components/immich/ @mib1185
/homeassistant/components/improv_ble/ @emontnemery
/tests/components/improv_ble/ @emontnemery
/homeassistant/components/incomfort/ @jbouwh
@@ -1088,6 +1081,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/ombi/ @larssont
/homeassistant/components/onboarding/ @home-assistant/core
/tests/components/onboarding/ @home-assistant/core
/homeassistant/components/oncue/ @bdraco @peterager
/tests/components/oncue/ @bdraco @peterager
/homeassistant/components/ondilo_ico/ @JeromeHXP
/tests/components/ondilo_ico/ @JeromeHXP
/homeassistant/components/onedrive/ @zweckj
@@ -1116,8 +1111,8 @@ build.json @home-assistant/supervisor
/tests/components/opentherm_gw/ @mvn23
/homeassistant/components/openuv/ @bachya
/tests/components/openuv/ @bachya
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/tests/components/openweathermap/ @fabaff @freekode @nzapponi @wittypluck
/homeassistant/components/openweathermap/ @fabaff @freekode @nzapponi
/tests/components/openweathermap/ @fabaff @freekode @nzapponi
/homeassistant/components/opnsense/ @mtreinish
/tests/components/opnsense/ @mtreinish
/homeassistant/components/opower/ @tronikos
@@ -1143,8 +1138,6 @@ build.json @home-assistant/supervisor
/tests/components/palazzetti/ @dotvav
/homeassistant/components/panel_custom/ @home-assistant/frontend
/tests/components/panel_custom/ @home-assistant/frontend
/homeassistant/components/paperless_ngx/ @fvgarrel
/tests/components/paperless_ngx/ @fvgarrel
/homeassistant/components/peblar/ @frenck
/tests/components/peblar/ @frenck
/homeassistant/components/peco/ @IceBotYT
@@ -1183,8 +1176,6 @@ build.json @home-assistant/supervisor
/tests/components/powerwall/ @bdraco @jrester @daniel-simpson
/homeassistant/components/private_ble_device/ @Jc2k
/tests/components/private_ble_device/ @Jc2k
/homeassistant/components/probe_plus/ @pantherale0
/tests/components/probe_plus/ @pantherale0
/homeassistant/components/profiler/ @bdraco
/tests/components/profiler/ @bdraco
/homeassistant/components/progettihwsw/ @ardaseremet
@@ -1231,7 +1222,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/qnap_qsw/ @Noltari
/tests/components/qnap_qsw/ @Noltari
/homeassistant/components/quantum_gateway/ @cisasteelersfan
/tests/components/quantum_gateway/ @cisasteelersfan
/homeassistant/components/qvr_pro/ @oblogic7
/homeassistant/components/qwikswitch/ @kellerza
/tests/components/qwikswitch/ @kellerza
@@ -1270,8 +1260,6 @@ build.json @home-assistant/supervisor
/tests/components/recovery_mode/ @home-assistant/core
/homeassistant/components/refoss/ @ashionky
/tests/components/refoss/ @ashionky
/homeassistant/components/rehlko/ @bdraco @peterager
/tests/components/rehlko/ @bdraco @peterager
/homeassistant/components/remote/ @home-assistant/core
/tests/components/remote/ @home-assistant/core
/homeassistant/components/remote_calendar/ @Thomas55555
@@ -1317,6 +1305,8 @@ build.json @home-assistant/supervisor
/tests/components/rpi_power/ @shenxn @swetoast
/homeassistant/components/rss_feed_template/ @home-assistant/core
/tests/components/rss_feed_template/ @home-assistant/core
/homeassistant/components/rtsp_to_webrtc/ @allenporter
/tests/components/rtsp_to_webrtc/ @allenporter
/homeassistant/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
/homeassistant/components/russound_rio/ @noahhusby
@@ -1420,8 +1410,6 @@ build.json @home-assistant/supervisor
/tests/components/sma/ @kellerza @rklomp @erwindouna
/homeassistant/components/smappee/ @bsmappee
/tests/components/smappee/ @bsmappee
/homeassistant/components/smarla/ @explicatis @rlint-explicatis
/tests/components/smarla/ @explicatis @rlint-explicatis
/homeassistant/components/smart_meter_texas/ @grahamwetzler
/tests/components/smart_meter_texas/ @grahamwetzler
/homeassistant/components/smartthings/ @joostlek
@@ -1451,8 +1439,8 @@ build.json @home-assistant/supervisor
/tests/components/solarlog/ @Ernst79 @dontinelli
/homeassistant/components/solax/ @squishykid @Darsstar
/tests/components/solax/ @squishykid @Darsstar
/homeassistant/components/soma/ @ratsept
/tests/components/soma/ @ratsept
/homeassistant/components/soma/ @ratsept @sebfortier2288
/tests/components/soma/ @ratsept @sebfortier2288
/homeassistant/components/sonarr/ @ctalkington
/tests/components/sonarr/ @ctalkington
/homeassistant/components/songpal/ @rytilahti @shenxn
@@ -1484,8 +1472,7 @@ build.json @home-assistant/supervisor
/tests/components/steam_online/ @tkdrob
/homeassistant/components/steamist/ @bdraco
/tests/components/steamist/ @bdraco
/homeassistant/components/stiebel_eltron/ @fucm @ThyMYthOS
/tests/components/stiebel_eltron/ @fucm @ThyMYthOS
/homeassistant/components/stiebel_eltron/ @fucm
/homeassistant/components/stookwijzer/ @fwestenberg
/tests/components/stookwijzer/ @fwestenberg
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
@@ -1496,8 +1483,8 @@ build.json @home-assistant/supervisor
/tests/components/subaru/ @G-Two
/homeassistant/components/suez_water/ @ooii @jb101010-2
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @home-assistant/core
/tests/components/sun/ @home-assistant/core
/homeassistant/components/sun/ @Swamp-Ig
/tests/components/sun/ @Swamp-Ig
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen
@@ -1510,8 +1497,8 @@ build.json @home-assistant/supervisor
/tests/components/switch_as_x/ @home-assistant/core
/homeassistant/components/switchbee/ @jafar-atili
/tests/components/switchbee/ @jafar-atili
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski @zerzhang
/homeassistant/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
@@ -1551,8 +1538,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @Petro31 @home-assistant/core
/tests/components/template/ @Petro31 @home-assistant/core
/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks
@@ -1688,8 +1675,8 @@ build.json @home-assistant/supervisor
/tests/components/vlc_telnet/ @rodripf @MartinHjelmare
/homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
/tests/components/vodafone_station/ @paoloantinori @chemelli74
/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
/tests/components/voip/ @balloob @synesthesiam @jaminh
/homeassistant/components/voip/ @balloob @synesthesiam
/tests/components/voip/ @balloob @synesthesiam
/homeassistant/components/volumio/ @OnFreund
/tests/components/volumio/ @OnFreund
/homeassistant/components/volvooncall/ @molobrakos
@@ -1806,8 +1793,6 @@ build.json @home-assistant/supervisor
/tests/components/zeversolar/ @kvanzuijlen
/homeassistant/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
/tests/components/zha/ @dmulcahey @adminiuga @puddly @TheJulianJES
/homeassistant/components/zimi/ @markhannon
/tests/components/zimi/ @markhannon
/homeassistant/components/zodiac/ @JulienTant
/tests/components/zodiac/ @JulienTant
/homeassistant/components/zone/ @home-assistant/core

Dockerfile (generated)

@@ -31,7 +31,7 @@ RUN \
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.7.1
RUN pip3 install uv==0.6.10
WORKDIR /usr/src


@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.05.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.05.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.05.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.05.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.05.0
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.02.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.02.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.02.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.02.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.02.1
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io


@@ -75,7 +75,6 @@ from .core_config import async_process_ha_core_config
from .exceptions import HomeAssistantError
from .helpers import (
area_registry,
backup,
category_registry,
config_validation as cv,
device_registry,
@@ -881,10 +880,6 @@ async def _async_set_up_integrations(
if "recorder" in all_domains:
recorder.async_initialize_recorder(hass)
# Initialize backup
if "backup" in all_domains:
backup.async_initialize_backup(hass)
stages: list[tuple[str, set[str], int | None]] = [
*(
(name, domain_group, timeout)
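
This hunk is the change the commit message names: during startup, _async_set_up_integrations pre-initializes certain subsystems only when their domain is queued for setup, and the backup hook is dropped from that list. Condensed from the lines above (a sketch, not the full function):

# Subsystems are initialized early only when their domain will be set up;
# the recorder hook stays, while the equivalent backup hook is removed here.
if "recorder" in all_domains:
    recorder.async_initialize_recorder(hass)
# Dropped by this change:
# if "backup" in all_domains:
#     backup.async_initialize_backup(hass)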


@@ -1,13 +1,5 @@
{
"domain": "amazon",
"name": "Amazon",
"integrations": [
"alexa",
"amazon_devices",
"amazon_polly",
"aws",
"aws_s3",
"fire_tv",
"route53"
]
"integrations": ["alexa", "amazon_polly", "aws", "fire_tv", "route53"]
}


@@ -6,7 +6,6 @@
"google_assistant_sdk",
"google_cloud",
"google_drive",
"google_gemini",
"google_generative_ai_conversation",
"google_mail",
"google_maps",


@@ -1,6 +0,0 @@
{
"domain": "nuki",
"name": "Nuki",
"integrations": ["nuki"],
"iot_standards": ["matter"]
}


@@ -67,7 +67,6 @@ POLLEN_CATEGORY_MAP = {
2: "moderate",
3: "high",
4: "very_high",
5: "extreme",
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
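
For context, a sketch of how a category map like this is typically applied. The lookup helper and the 1: "low" entry are assumptions added for illustration; only keys 2-4 (and the dropped 5) appear in the hunk above:

POLLEN_CATEGORY_MAP = {
    1: "low",  # assumed; not shown in the hunk
    2: "moderate",
    3: "high",
    4: "very_high",
}

def pollen_category(index: int) -> str | None:
    # Hypothetical helper: translate a raw pollen index into its category key.
    return POLLEN_CATEGORY_MAP.get(index)

assert pollen_category(4) == "very_high"
assert pollen_category(5) is None  # 5 ("extreme") is the entry removed in this hunk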


@@ -72,7 +72,6 @@
"level": {
"name": "Level",
"state": {
"extreme": "Extreme",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "Moderate",
@@ -90,7 +89,6 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
@@ -125,7 +123,6 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
@@ -170,7 +167,6 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
@@ -185,7 +181,6 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",
@@ -200,7 +195,6 @@
"level": {
"name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]",
"state": {
"extreme": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::extreme%]",
"high": "[%key:common::state::high%]",
"low": "[%key:common::state::low%]",
"moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]",


@@ -2,38 +2,25 @@
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import CONNECTION_TYPE, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxConfigEntry, AdaxLocalCoordinator
PLATFORMS = [Platform.CLIMATE]
async def async_setup_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Adax from a config entry."""
if entry.data.get(CONNECTION_TYPE) == LOCAL:
local_coordinator = AdaxLocalCoordinator(hass, entry)
entry.runtime_data = local_coordinator
else:
cloud_coordinator = AdaxCloudCoordinator(hass, entry)
entry.runtime_data = cloud_coordinator
await entry.runtime_data.async_config_entry_first_refresh()
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AdaxConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_migrate_entry(
hass: HomeAssistant, config_entry: AdaxConfigEntry
) -> bool:
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate old entry."""
# convert title and unique_id to string
if config_entry.version == 1:


@@ -12,42 +12,57 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
HVACMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_TOKEN,
CONF_UNIQUE_ID,
PRECISION_WHOLE,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AdaxConfigEntry
from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator, AdaxLocalCoordinator
from .const import ACCOUNT_ID, CONNECTION_TYPE, DOMAIN, LOCAL
async def async_setup_entry(
hass: HomeAssistant,
entry: AdaxConfigEntry,
entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Adax thermostat with config flow."""
if entry.data.get(CONNECTION_TYPE) == LOCAL:
local_coordinator = cast(AdaxLocalCoordinator, entry.runtime_data)
async_add_entities(
[LocalAdaxDevice(local_coordinator, entry.data[CONF_UNIQUE_ID])],
adax_data_handler = AdaxLocal(
entry.data[CONF_IP_ADDRESS],
entry.data[CONF_TOKEN],
websession=async_get_clientsession(hass, verify_ssl=False),
)
else:
cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)
async_add_entities(
AdaxDevice(cloud_coordinator, device_id)
for device_id in cloud_coordinator.data
[LocalAdaxDevice(adax_data_handler, entry.data[CONF_UNIQUE_ID])], True
)
return
adax_data_handler = Adax(
entry.data[ACCOUNT_ID],
entry.data[CONF_PASSWORD],
websession=async_get_clientsession(hass),
)
async_add_entities(
(
AdaxDevice(room, adax_data_handler)
for room in await adax_data_handler.get_rooms()
),
True,
)
class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
class AdaxDevice(ClimateEntity):
"""Representation of a heater."""
_attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
@@ -61,37 +76,20 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
def __init__(
self,
coordinator: AdaxCloudCoordinator,
device_id: str,
) -> None:
def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None:
"""Initialize the heater."""
super().__init__(coordinator)
self._adax_data_handler: Adax = coordinator.adax_data_handler
self._device_id = device_id
self._device_id = heater_data["id"]
self._adax_data_handler = adax_data_handler
self._attr_name = self.room["name"]
self._attr_unique_id = f"{self.room['homeId']}_{self._device_id}"
self._attr_unique_id = f"{heater_data['homeId']}_{heater_data['id']}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device_id)},
identifiers={(DOMAIN, heater_data["id"])},
# Instead of setting the device name to the entity name, adax
# should be updated to set has_entity_name = True, and set the entity
# name to None
name=cast(str | None, self.name),
manufacturer="Adax",
)
self._apply_data(self.room)
@property
def available(self) -> bool:
"""Whether the entity is available or not."""
return super().available and self._device_id in self.coordinator.data
@property
def room(self) -> dict[str, Any]:
"""Gets the data for this particular device."""
return self.coordinator.data[self._device_id]
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set hvac mode."""
@@ -106,9 +104,7 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
)
else:
return
# Request data refresh from source to verify that update was successful
await self.coordinator.async_request_refresh()
await self._adax_data_handler.update()
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
@@ -118,31 +114,28 @@ class AdaxDevice(CoordinatorEntity[AdaxCloudCoordinator], ClimateEntity):
self._device_id, temperature, True
)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if room := self.room:
self._apply_data(room)
super()._handle_coordinator_update()
def _apply_data(self, room: dict[str, Any]) -> None:
"""Update the appropriate attributues based on received data."""
self._attr_current_temperature = room.get("temperature")
self._attr_target_temperature = room.get("targetTemperature")
if room["heatingEnabled"]:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
else:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
async def async_update(self) -> None:
"""Get the latest data."""
for room in await self._adax_data_handler.get_rooms():
if room["id"] != self._device_id:
continue
self._attr_name = room["name"]
self._attr_current_temperature = room.get("temperature")
self._attr_target_temperature = room.get("targetTemperature")
if room["heatingEnabled"]:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
else:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
return
class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
class LocalAdaxDevice(ClimateEntity):
"""Representation of a heater."""
_attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF]
_attr_hvac_mode = HVACMode.OFF
_attr_icon = "mdi:radiator-off"
_attr_hvac_mode = HVACMode.HEAT
_attr_max_temp = 35
_attr_min_temp = 5
_attr_supported_features = (
@@ -153,10 +146,9 @@ class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
def __init__(self, coordinator: AdaxLocalCoordinator, unique_id: str) -> None:
def __init__(self, adax_data_handler: AdaxLocal, unique_id: str) -> None:
"""Initialize the heater."""
super().__init__(coordinator)
self._adax_data_handler: AdaxLocal = coordinator.adax_data_handler
self._adax_data_handler = adax_data_handler
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
@@ -177,20 +169,17 @@ class LocalAdaxDevice(CoordinatorEntity[AdaxLocalCoordinator], ClimateEntity):
return
await self._adax_data_handler.set_target_temperature(temperature)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if data := self.coordinator.data:
self._attr_current_temperature = data["current_temperature"]
self._attr_available = self._attr_current_temperature is not None
if (target_temp := data["target_temperature"]) == 0:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
if target_temp == 0:
self._attr_target_temperature = self._attr_min_temp
else:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
self._attr_target_temperature = target_temp
super()._handle_coordinator_update()
async def async_update(self) -> None:
"""Get the latest data."""
data = await self._adax_data_handler.get_status()
self._attr_current_temperature = data["current_temperature"]
self._attr_available = self._attr_current_temperature is not None
if (target_temp := data["target_temperature"]) == 0:
self._attr_hvac_mode = HVACMode.OFF
self._attr_icon = "mdi:radiator-off"
if target_temp == 0:
self._attr_target_temperature = self._attr_min_temp
else:
self._attr_hvac_mode = HVACMode.HEAT
self._attr_icon = "mdi:radiator"
self._attr_target_temperature = target_temp


@@ -1,6 +1,5 @@
"""Constants for the Adax integration."""
import datetime
from typing import Final
ACCOUNT_ID: Final = "account_id"
@@ -10,5 +9,3 @@ DOMAIN: Final = "adax"
LOCAL = "Local"
WIFI_SSID = "wifi_ssid"
WIFI_PSWD = "wifi_pswd"
SCAN_INTERVAL = datetime.timedelta(seconds=60)


@@ -1,71 +0,0 @@
"""DataUpdateCoordinator for the Adax component."""
import logging
from typing import Any, cast
from adax import Adax
from adax_local import Adax as AdaxLocal
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import ACCOUNT_ID, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
type AdaxConfigEntry = ConfigEntry[AdaxCloudCoordinator | AdaxLocalCoordinator]
class AdaxCloudCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
"""Coordinator for updating data to and from Adax (cloud)."""
def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
"""Initialize the Adax coordinator used for Cloud mode."""
super().__init__(
hass,
config_entry=entry,
logger=_LOGGER,
name="AdaxCloud",
update_interval=SCAN_INTERVAL,
)
self.adax_data_handler = Adax(
entry.data[ACCOUNT_ID],
entry.data[CONF_PASSWORD],
websession=async_get_clientsession(hass),
)
async def _async_update_data(self) -> dict[str, dict[str, Any]]:
"""Fetch data from the Adax."""
rooms = await self.adax_data_handler.get_rooms() or []
return {r["id"]: r for r in rooms}
class AdaxLocalCoordinator(DataUpdateCoordinator[dict[str, Any] | None]):
"""Coordinator for updating data to and from Adax (local)."""
def __init__(self, hass: HomeAssistant, entry: AdaxConfigEntry) -> None:
"""Initialize the Adax coordinator used for Local mode."""
super().__init__(
hass,
config_entry=entry,
logger=_LOGGER,
name="AdaxLocal",
update_interval=SCAN_INTERVAL,
)
self.adax_data_handler = AdaxLocal(
entry.data[CONF_IP_ADDRESS],
entry.data[CONF_TOKEN],
websession=async_get_clientsession(hass, verify_ssl=False),
)
async def _async_update_data(self) -> dict[str, Any]:
"""Fetch data from the Adax."""
if result := await self.adax_data_handler.get_status():
return cast(dict[str, Any], result)
raise UpdateFailed("Got invalid status from device")
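
A short sketch of how the AdaxConfigEntry alias above is meant to be consumed; the helper below is hypothetical and assumes the two coordinator classes defined in this file:

def count_cloud_rooms(entry: AdaxConfigEntry) -> int:
    # entry.runtime_data is typed as AdaxCloudCoordinator | AdaxLocalCoordinator.
    coordinator = entry.runtime_data
    if isinstance(coordinator, AdaxCloudCoordinator):
        return len(coordinator.data)  # cloud data maps room id -> room dict
    return 0  # the local coordinator exposes a single device status instead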


@@ -1,7 +1,7 @@
{
"domain": "adax",
"name": "Adax",
"codeowners": ["@danielhiversen", "@lazytarget"],
"codeowners": ["@danielhiversen"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/adax",
"iot_class": "local_polling",


@@ -8,7 +8,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AdvantageAirDataConfigEntry
from .const import ADVANTAGE_AIR_STATE_ON, DOMAIN
from .const import ADVANTAGE_AIR_STATE_ON, DOMAIN as ADVANTAGE_AIR_DOMAIN
from .entity import AdvantageAirEntity, AdvantageAirThingEntity
from .models import AdvantageAirData
@@ -52,8 +52,8 @@ class AdvantageAirLight(AdvantageAirEntity, LightEntity):
self._id: str = light["id"]
self._attr_unique_id += f"-{self._id}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._attr_unique_id)},
via_device=(DOMAIN, self.coordinator.data["system"]["rid"]),
identifiers={(ADVANTAGE_AIR_DOMAIN, self._attr_unique_id)},
via_device=(ADVANTAGE_AIR_DOMAIN, self.coordinator.data["system"]["rid"]),
manufacturer="Advantage Air",
model=light.get("moduleType"),
name=light["name"],

View File

@@ -6,7 +6,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AdvantageAirDataConfigEntry
from .const import DOMAIN
from .const import DOMAIN as ADVANTAGE_AIR_DOMAIN
from .entity import AdvantageAirEntity
from .models import AdvantageAirData
@@ -32,7 +32,9 @@ class AdvantageAirApp(AdvantageAirEntity, UpdateEntity):
"""Initialize the Advantage Air App."""
super().__init__(instance)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self.coordinator.data["system"]["rid"])},
identifiers={
(ADVANTAGE_AIR_DOMAIN, self.coordinator.data["system"]["rid"])
},
manufacturer="Advantage Air",
model=self.coordinator.data["system"]["sysType"],
name=self.coordinator.data["system"]["name"],


@@ -10,7 +10,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN, SERVER_URL
from .const import DOMAIN as AGENT_DOMAIN, SERVER_URL
ATTRIBUTION = "ispyconnect.com"
DEFAULT_BRAND = "Agent DVR by ispyconnect.com"
@@ -46,7 +46,7 @@ async def async_setup_entry(
device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
identifiers={(DOMAIN, agent_client.unique)},
identifiers={(AGENT_DOMAIN, agent_client.unique)},
manufacturer="iSpyConnect",
name=f"Agent {agent_client.name}",
model="Agent DVR",


@@ -12,7 +12,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AgentDVRConfigEntry
from .const import DOMAIN
from .const import DOMAIN as AGENT_DOMAIN
CONF_HOME_MODE_NAME = "home"
CONF_AWAY_MODE_NAME = "away"
@@ -47,7 +47,7 @@ class AgentBaseStation(AlarmControlPanelEntity):
self._client = client
self._attr_unique_id = f"{client.unique}_CP"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, client.unique)},
identifiers={(AGENT_DOMAIN, client.unique)},
name=f"{client.name} {CONST_ALARM_CONTROL_PANEL_NAME}",
manufacturer="Agent",
model=CONST_ALARM_CONTROL_PANEL_NAME,

View File

@@ -6,7 +6,6 @@ from typing import Any, Concatenate
from airgradient import AirGradientConnectionError, AirGradientError, get_model_name
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -30,7 +29,6 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]):
model_id=measures.model,
serial_number=coordinator.serial_number,
sw_version=measures.firmware_version,
connections={(dr.CONNECTION_NETWORK_MAC, coordinator.serial_number)},
)


@@ -3,19 +3,6 @@
"name": "Airthings",
"codeowners": ["@danielhiversen", "@LaStrada"],
"config_flow": true,
"dhcp": [
{
"hostname": "airthings-view"
},
{
"hostname": "airthings-hub",
"macaddress": "D0141190*"
},
{
"hostname": "airthings-hub",
"macaddress": "70B3D52A0*"
}
],
"documentation": "https://www.home-assistant.io/integrations/airthings",
"iot_class": "cloud_polling",
"loggers": ["airthings"],


@@ -14,7 +14,6 @@ from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
LIGHT_LUX,
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS,
EntityCategory,
@@ -79,12 +78,6 @@ SENSORS: dict[str, SensorEntityDescription] = {
translation_key="light",
state_class=SensorStateClass.MEASUREMENT,
),
"lux": SensorEntityDescription(
key="lux",
device_class=SensorDeviceClass.ILLUMINANCE,
native_unit_of_measurement=LIGHT_LUX,
state_class=SensorStateClass.MEASUREMENT,
),
"virusRisk": SensorEntityDescription(
key="virusRisk",
translation_key="virus_risk",


@@ -142,7 +142,7 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
return AT_TO_HA_STATE[self._airtouch.acs[self._ac_number].AcMode]
@property
def hvac_modes(self) -> list[HVACMode]:
def hvac_modes(self):
"""Return the list of available operation modes."""
airtouch_modes = self._airtouch.GetSupportedCoolingModesForAc(self._ac_number)
modes = [AT_TO_HA_STATE[mode] for mode in airtouch_modes]
@@ -226,12 +226,12 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
return super()._handle_coordinator_update()
@property
def min_temp(self) -> float:
def min_temp(self):
"""Return Minimum Temperature for AC of this group."""
return self._airtouch.acs[self._unit.BelongsToAc].MinSetpoint
@property
def max_temp(self) -> float:
def max_temp(self):
"""Return Max Temperature for AC of this group."""
return self._airtouch.acs[self._unit.BelongsToAc].MaxSetpoint


@@ -2,7 +2,7 @@
"config": {
"step": {
"user": {
"title": "Choose AlarmDecoder protocol",
"title": "Choose AlarmDecoder Protocol",
"data": {
"protocol": "Protocol"
}
@@ -12,8 +12,8 @@
"data": {
"host": "[%key:common::config_flow::data::host%]",
"port": "[%key:common::config_flow::data::port%]",
"device_baudrate": "Device baud rate",
"device_path": "Device path"
"device_baudrate": "Device Baud Rate",
"device_path": "Device Path"
},
"data_description": {
"host": "The hostname or IP address of the AlarmDecoder device that is connected to your alarm panel.",
@@ -44,36 +44,36 @@
"arm_settings": {
"title": "[%key:component::alarmdecoder::options::step::init::title%]",
"data": {
"auto_bypass": "Auto-bypass on arm",
"code_arm_required": "Code required for arming",
"alt_night_mode": "Alternative night mode"
"auto_bypass": "Auto Bypass on Arm",
"code_arm_required": "Code Required for Arming",
"alt_night_mode": "Alternative Night Mode"
}
},
"zone_select": {
"title": "[%key:component::alarmdecoder::options::step::init::title%]",
"description": "Enter the zone number you'd like to to add, edit, or remove.",
"data": {
"zone_number": "Zone number"
"zone_number": "Zone Number"
}
},
"zone_details": {
"title": "[%key:component::alarmdecoder::options::step::init::title%]",
"description": "Enter details for zone {zone_number}. To delete zone {zone_number}, leave 'Zone name' blank.",
"description": "Enter details for zone {zone_number}. To delete zone {zone_number}, leave Zone Name blank.",
"data": {
"zone_name": "Zone name",
"zone_type": "Zone type",
"zone_rfid": "RF serial",
"zone_loop": "RF loop",
"zone_relayaddr": "Relay address",
"zone_relaychan": "Relay channel"
"zone_name": "Zone Name",
"zone_type": "Zone Type",
"zone_rfid": "RF Serial",
"zone_loop": "RF Loop",
"zone_relayaddr": "Relay Address",
"zone_relaychan": "Relay Channel"
}
}
},
"error": {
"relay_inclusive": "'Relay address' and 'Relay channel' are codependent and must be included together.",
"relay_inclusive": "Relay Address and Relay Channel are codependent and must be included together.",
"int": "The field below must be an integer.",
"loop_rfid": "'RF loop' cannot be used without 'RF serial'.",
"loop_range": "'RF loop' must be an integer between 1 and 4."
"loop_rfid": "RF Loop cannot be used without RF Serial.",
"loop_range": "RF Loop must be an integer between 1 and 4."
}
},
"services": {

View File

@@ -719,7 +719,7 @@ class LockCapabilities(AlexaEntity):
yield Alexa(self.entity)
@ENTITY_ADAPTERS.register(media_player.DOMAIN)
@ENTITY_ADAPTERS.register(media_player.const.DOMAIN)
class MediaPlayerCapabilities(AlexaEntity):
"""Class to represent MediaPlayer capabilities."""
@@ -757,7 +757,9 @@ class MediaPlayerCapabilities(AlexaEntity):
if supported & media_player.MediaPlayerEntityFeature.SELECT_SOURCE:
inputs = AlexaInputController.get_valid_inputs(
self.entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST, [])
self.entity.attributes.get(
media_player.const.ATTR_INPUT_SOURCE_LIST, []
)
)
if len(inputs) > 0:
yield AlexaInputController(self.entity)
@@ -774,7 +776,8 @@ class MediaPlayerCapabilities(AlexaEntity):
and domain != "denonavr"
):
inputs = AlexaEqualizerController.get_valid_inputs(
self.entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST) or []
self.entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
or []
)
if len(inputs) > 0:
yield AlexaEqualizerController(self.entity)

View File

@@ -566,7 +566,7 @@ async def async_api_set_volume(
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
}
await hass.services.async_call(
@@ -589,7 +589,7 @@ async def async_api_select_input(
# Attempt to map the ALL UPPERCASE payload name to a source.
# Strips trailing 1 to match single input devices.
source_list = entity.attributes.get(media_player.ATTR_INPUT_SOURCE_LIST) or []
source_list = entity.attributes.get(media_player.const.ATTR_INPUT_SOURCE_LIST) or []
for source in source_list:
formatted_source = (
source.lower().replace("-", "").replace("_", "").replace(" ", "")
@@ -611,7 +611,7 @@ async def async_api_select_input(
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.ATTR_INPUT_SOURCE: media_input,
media_player.const.ATTR_INPUT_SOURCE: media_input,
}
await hass.services.async_call(
@@ -636,7 +636,7 @@ async def async_api_adjust_volume(
volume_delta = int(directive.payload["volume"])
entity = directive.entity
current_level = entity.attributes[media_player.ATTR_MEDIA_VOLUME_LEVEL]
current_level = entity.attributes[media_player.const.ATTR_MEDIA_VOLUME_LEVEL]
# read current state
try:
@@ -648,7 +648,7 @@ async def async_api_adjust_volume(
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.ATTR_MEDIA_VOLUME_LEVEL: volume,
media_player.const.ATTR_MEDIA_VOLUME_LEVEL: volume,
}
await hass.services.async_call(
@@ -709,7 +709,7 @@ async def async_api_set_mute(
entity = directive.entity
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.ATTR_MEDIA_VOLUME_MUTED: mute,
media_player.const.ATTR_MEDIA_VOLUME_MUTED: mute,
}
await hass.services.async_call(
@@ -1708,13 +1708,15 @@ async def async_api_changechannel(
data: dict[str, Any] = {
ATTR_ENTITY_ID: entity.entity_id,
media_player.ATTR_MEDIA_CONTENT_ID: channel,
media_player.ATTR_MEDIA_CONTENT_TYPE: (media_player.MediaType.CHANNEL),
media_player.const.ATTR_MEDIA_CONTENT_ID: channel,
media_player.const.ATTR_MEDIA_CONTENT_TYPE: (
media_player.const.MEDIA_TYPE_CHANNEL
),
}
await hass.services.async_call(
entity.domain,
media_player.SERVICE_PLAY_MEDIA,
media_player.const.SERVICE_PLAY_MEDIA,
data,
blocking=False,
context=context,
@@ -1823,13 +1825,13 @@ async def async_api_set_eq_mode(
context: ha.Context,
) -> AlexaResponse:
"""Process a SetMode request for EqualizerController."""
mode: str = directive.payload["mode"]
mode = directive.payload["mode"]
entity = directive.entity
data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}
sound_mode_list = entity.attributes.get(media_player.ATTR_SOUND_MODE_LIST)
sound_mode_list = entity.attributes.get(media_player.const.ATTR_SOUND_MODE_LIST)
if sound_mode_list and mode.lower() in sound_mode_list:
data[media_player.ATTR_SOUND_MODE] = mode.lower()
data[media_player.const.ATTR_SOUND_MODE] = mode.lower()
else:
msg = f"failed to map sound mode {mode} to a mode on {entity.entity_id}"
raise AlexaInvalidValueError(msg)

View File

@@ -1,32 +0,0 @@
"""Amazon Devices integration."""
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.NOTIFY,
Platform.SWITCH,
]
async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
"""Set up Amazon Devices platform."""
coordinator = AmazonDevicesCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
"""Unload a config entry."""
await entry.runtime_data.api.close()
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -1,71 +0,0 @@
"""Support for binary sensors."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Final
from aioamazondevices.api import AmazonDevice
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Amazon Devices binary sensor entity description."""
is_on_fn: Callable[[AmazonDevice], bool]
BINARY_SENSORS: Final = (
AmazonBinarySensorEntityDescription(
key="online",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
is_on_fn=lambda _device: _device.online,
),
AmazonBinarySensorEntityDescription(
key="bluetooth",
translation_key="bluetooth",
is_on_fn=lambda _device: _device.bluetooth_state,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AmazonConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Amazon Devices binary sensors based on a config entry."""
coordinator = entry.runtime_data
async_add_entities(
AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in BINARY_SENSORS
for serial_num in coordinator.data
)
class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
"""Binary sensor device."""
entity_description: AmazonBinarySensorEntityDescription
@property
def is_on(self) -> bool:
"""Return True if the binary sensor is on."""
return self.entity_description.is_on_fn(self.device)

View File

@@ -1,63 +0,0 @@
"""Config flow for Amazon Devices integration."""
from __future__ import annotations
from typing import Any
from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.exceptions import CannotAuthenticate, CannotConnect
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_CODE, CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.selector import CountrySelector
from .const import CONF_LOGIN_DATA, DOMAIN
class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Amazon Devices."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors = {}
if user_input:
client = AmazonEchoApi(
user_input[CONF_COUNTRY],
user_input[CONF_USERNAME],
user_input[CONF_PASSWORD],
)
try:
data = await client.login_mode_interactive(user_input[CONF_CODE])
except CannotConnect:
errors["base"] = "cannot_connect"
except CannotAuthenticate:
errors["base"] = "invalid_auth"
else:
await self.async_set_unique_id(data["customer_info"]["user_id"])
self._abort_if_unique_id_configured()
user_input.pop(CONF_CODE)
return self.async_create_entry(
title=user_input[CONF_USERNAME],
data=user_input | {CONF_LOGIN_DATA: data},
)
finally:
await client.close()
return self.async_show_form(
step_id="user",
errors=errors,
data_schema=vol.Schema(
{
vol.Required(
CONF_COUNTRY, default=self.hass.config.country
): CountrySelector(),
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CODE): cv.positive_int,
}
),
)

View File

@@ -1,8 +0,0 @@
"""Amazon Devices constants."""
import logging
_LOGGER = logging.getLogger(__package__)
DOMAIN = "amazon_devices"
CONF_LOGIN_DATA = "login_data"

View File

@@ -1,58 +0,0 @@
"""Support for Amazon Devices."""
from datetime import timedelta
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.exceptions import (
CannotAuthenticate,
CannotConnect,
CannotRetrieveData,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import _LOGGER, CONF_LOGIN_DATA
SCAN_INTERVAL = 30
type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]
class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
"""Base coordinator for Amazon Devices."""
config_entry: AmazonConfigEntry
def __init__(
self,
hass: HomeAssistant,
entry: AmazonConfigEntry,
) -> None:
"""Initialize the scanner."""
super().__init__(
hass,
_LOGGER,
name=entry.title,
config_entry=entry,
update_interval=timedelta(seconds=SCAN_INTERVAL),
)
self.api = AmazonEchoApi(
entry.data[CONF_COUNTRY],
entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD],
entry.data[CONF_LOGIN_DATA],
)
async def _async_update_data(self) -> dict[str, AmazonDevice]:
"""Update device data."""
try:
await self.api.login_mode_stored_data()
return await self.api.get_devices_data()
except (CannotConnect, CannotRetrieveData) as err:
raise UpdateFailed(f"Error occurred while updating {self.name}") from err
except CannotAuthenticate as err:
raise ConfigEntryError("Could not authenticate") from err
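The two except branches above map library errors onto different Home Assistant semantics: UpdateFailed marks the coordinator's entities unavailable and lets the next scheduled refresh retry, while ConfigEntryError fails the entry setup until the user intervenes. A condensed sketch of that split, with annotations (exception names as in the code above, messages illustrative):

try:
    await self.api.login_mode_stored_data()
    return await self.api.get_devices_data()
except (CannotConnect, CannotRetrieveData) as err:
    # Transient: entities go unavailable, retried on the next update interval
    raise UpdateFailed(f"Error occurred while updating {self.name}") from err
except CannotAuthenticate as err:
    # Not retried automatically: the entry setup fails and needs user attention
    raise ConfigEntryError("Could not authenticate") from err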

View File

@@ -1,53 +0,0 @@
"""Defines a base Amazon Devices entity."""
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SPEAKER_GROUP_MODEL
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import AmazonDevicesCoordinator
class AmazonEntity(CoordinatorEntity[AmazonDevicesCoordinator]):
"""Defines a base Amazon Devices entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AmazonDevicesCoordinator,
serial_num: str,
description: EntityDescription,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._serial_num = serial_num
model_details = coordinator.api.get_model_details(self.device)
model = model_details["model"] if model_details else None
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial_num)},
name=self.device.account_name,
model=model,
model_id=self.device.device_type,
manufacturer="Amazon",
hw_version=model_details["hw_version"] if model_details else None,
sw_version=(
self.device.software_version if model != SPEAKER_GROUP_MODEL else None
),
serial_number=serial_num if model != SPEAKER_GROUP_MODEL else None,
)
self.entity_description = description
self._attr_unique_id = f"{serial_num}-{description.key}"
@property
def device(self) -> AmazonDevice:
"""Return the device."""
return self.coordinator.data[self._serial_num]
@property
def available(self) -> bool:
"""Return True if entity is available."""
return super().available and self._serial_num in self.coordinator.data

View File

@@ -1,12 +0,0 @@
{
"entity": {
"binary_sensor": {
"bluetooth": {
"default": "mdi:bluetooth",
"state": {
"off": "mdi:bluetooth-off"
}
}
}
}
}

View File

@@ -1,33 +0,0 @@
{
"domain": "amazon_devices",
"name": "Amazon Devices",
"codeowners": ["@chemelli74"],
"config_flow": true,
"dhcp": [
{ "macaddress": "08A6BC*" },
{ "macaddress": "10BF67*" },
{ "macaddress": "440049*" },
{ "macaddress": "443D54*" },
{ "macaddress": "48B423*" },
{ "macaddress": "4C1744*" },
{ "macaddress": "50D45C*" },
{ "macaddress": "50DCE7*" },
{ "macaddress": "68F63B*" },
{ "macaddress": "74D637*" },
{ "macaddress": "7C6166*" },
{ "macaddress": "901195*" },
{ "macaddress": "943A91*" },
{ "macaddress": "98226E*" },
{ "macaddress": "9CC8E9*" },
{ "macaddress": "A8E621*" },
{ "macaddress": "C095CF*" },
{ "macaddress": "D8BE65*" },
{ "macaddress": "EC2BEB*" }
],
"documentation": "https://www.home-assistant.io/integrations/amazon_devices",
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "bronze",
"requirements": ["aioamazondevices==2.1.1"]
}
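The macaddress entries above are OUI prefixes used for DHCP discovery. Assuming fnmatch-style wildcard semantics against the colon-free uppercase MAC (a simplification of Home Assistant's actual matcher), the matching idea is roughly:

from fnmatch import fnmatch

# Hypothetical check: does a discovered MAC fall under one of the prefixes above?
print(fnmatch("74D637AABBCC", "74D637*"))  # True
print(fnmatch("001122AABBCC", "74D637*"))  # False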

View File

@@ -1,74 +0,0 @@
"""Support for notification entity."""
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any, Final
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
PARALLEL_UPDATES = 1
@dataclass(frozen=True, kw_only=True)
class AmazonNotifyEntityDescription(NotifyEntityDescription):
"""Amazon Devices notify entity description."""
method: Callable[[AmazonEchoApi, AmazonDevice, str], Awaitable[None]]
subkey: str
NOTIFY: Final = (
AmazonNotifyEntityDescription(
key="speak",
translation_key="speak",
subkey="AUDIO_PLAYER",
method=lambda api, device, message: api.call_alexa_speak(device, message),
),
AmazonNotifyEntityDescription(
key="announce",
translation_key="announce",
subkey="AUDIO_PLAYER",
method=lambda api, device, message: api.call_alexa_announcement(
device, message
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AmazonConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Amazon Devices notification entity based on a config entry."""
coordinator = entry.runtime_data
async_add_entities(
AmazonNotifyEntity(coordinator, serial_num, sensor_desc)
for sensor_desc in NOTIFY
for serial_num in coordinator.data
if sensor_desc.subkey in coordinator.data[serial_num].capabilities
)
class AmazonNotifyEntity(AmazonEntity, NotifyEntity):
"""Binary sensor notify platform."""
entity_description: AmazonNotifyEntityDescription
async def async_send_message(
self, message: str, title: str | None = None, **kwargs: Any
) -> None:
"""Send a message."""
await self.entity_description.method(self.coordinator.api, self.device, message)

View File

@@ -1,74 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: no actions
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: no actions
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: entities do not explicitly subscribe to events
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: todo
test-coverage:
status: todo
comment: all tests missing
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: Network information not relevant
discovery: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: no known use cases for repair issues or flows, yet
stale-devices:
status: todo
comment: automate the cleanup process
# Platinum
async-dependency: done
inject-websession: todo
strict-typing: done

View File

@@ -1,60 +0,0 @@
{
"common": {
"data_country": "Country code",
"data_code": "One-time password (OTP code)",
"data_description_country": "The country of your Amazon account.",
"data_description_username": "The email address of your Amazon account.",
"data_description_password": "The password of your Amazon account.",
"data_description_code": "The one-time password sent to your email address."
},
"config": {
"flow_title": "{username}",
"step": {
"user": {
"data": {
"country": "[%key:component::amazon_devices::common::data_country%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"code": "[%key:component::amazon_devices::common::data_description_code%]"
},
"data_description": {
"country": "[%key:component::amazon_devices::common::data_description_country%]",
"username": "[%key:component::amazon_devices::common::data_description_username%]",
"password": "[%key:component::amazon_devices::common::data_description_password%]",
"code": "[%key:component::amazon_devices::common::data_description_code%]"
}
}
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},
"entity": {
"binary_sensor": {
"bluetooth": {
"name": "Bluetooth"
}
},
"notify": {
"speak": {
"name": "Speak"
},
"announce": {
"name": "Announce"
}
},
"switch": {
"do_not_disturb": {
"name": "Do not disturb"
}
}
}
}

View File

@@ -1,84 +0,0 @@
"""Support for switches."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Final
from aioamazondevices.api import AmazonDevice
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
PARALLEL_UPDATES = 1
@dataclass(frozen=True, kw_only=True)
class AmazonSwitchEntityDescription(SwitchEntityDescription):
"""Amazon Devices switch entity description."""
is_on_fn: Callable[[AmazonDevice], bool]
subkey: str
method: str
SWITCHES: Final = (
AmazonSwitchEntityDescription(
key="do_not_disturb",
subkey="AUDIO_PLAYER",
translation_key="do_not_disturb",
is_on_fn=lambda _device: _device.do_not_disturb,
method="set_do_not_disturb",
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AmazonConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Amazon Devices switches based on a config entry."""
coordinator = entry.runtime_data
async_add_entities(
AmazonSwitchEntity(coordinator, serial_num, switch_desc)
for switch_desc in SWITCHES
for serial_num in coordinator.data
if switch_desc.subkey in coordinator.data[serial_num].capabilities
)
class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
"""Switch device."""
entity_description: AmazonSwitchEntityDescription
async def _switch_set_state(self, state: bool) -> None:
"""Set desired switch state."""
method = getattr(self.coordinator.api, self.entity_description.method)
if TYPE_CHECKING:
assert method is not None
await method(self.device, state)
await self.coordinator.async_request_refresh()
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self._switch_set_state(True)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self._switch_set_state(False)
@property
def is_on(self) -> bool:
"""Return True if switch is on."""
return self.entity_description.is_on_fn(self.device)
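Note how _switch_set_state above resolves the API coroutine by name at runtime via getattr, so a single entity class can serve any number of switch descriptions. The dispatch pattern in isolation, using a stand-in object rather than the real library:

import asyncio

class FakeApi:
    """Stand-in for the aioamazondevices client (illustrative only)."""

    async def set_do_not_disturb(self, device: str, state: bool) -> None:
        print(f"{device}: do-not-disturb -> {state}")

async def main() -> None:
    # The method name comes from the entity description, as in the code above
    method = getattr(FakeApi(), "set_do_not_disturb")
    await method("echo-kitchen", True)

asyncio.run(main())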

View File

@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["androidtvremote2"],
"requirements": ["androidtvremote2==0.2.2"],
"requirements": ["androidtvremote2==0.2.1"],
"zeroconf": ["_androidtvremote2._tcp.local."]
}

View File

@@ -51,10 +51,6 @@
"app_id": "Application ID",
"app_icon": "Application icon",
"app_delete": "Check to delete this application"
},
"data_description": {
"app_id": "E.g. com.plexapp.android for https://play.google.com/store/apps/details?id=com.plexapp.android",
"app_icon": "Image URL. From the Play Store app page, right click on the icon and select 'Copy image address' and then paste it here. Alternatively, download the image, upload it under /config/www/ and use the URL /local/filename"
}
}
}

View File

@@ -17,11 +17,4 @@ CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024
THINKING_MODELS = [
"claude-3-7-sonnet-20250219",
"claude-3-7-sonnet-latest",
"claude-opus-4-20250514",
"claude-opus-4-0",
"claude-sonnet-4-20250514",
"claude-sonnet-4-0",
]
THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"]

View File

@@ -9,13 +9,11 @@ from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
MessageDeltaUsage,
MessageParam,
MessageStreamEvent,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
RawMessageDeltaEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock,
@@ -33,7 +31,6 @@ from anthropic.types import (
ToolResultBlockParam,
ToolUseBlock,
ToolUseBlockParam,
Usage,
)
from voluptuous_openapi import convert
@@ -165,8 +162,7 @@ def _convert_content(
return messages
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
chat_log: conversation.ChatLog,
async def _transform_stream(
result: AsyncStream[MessageStreamEvent],
messages: list[MessageParam],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
@@ -211,7 +207,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
| None
) = None
current_tool_args: str
input_usage: Usage | None = None
async for response in result:
LOGGER.debug("Received response: %s", response)
@@ -220,7 +215,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
if response.message.role != "assistant":
raise ValueError("Unexpected message role")
current_message = MessageParam(role=response.message.role, content=[])
input_usage = response.message.usage
elif isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_block = ToolUseBlockParam(
@@ -291,36 +285,12 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
raise ValueError("Unexpected stop event without a current message")
current_message["content"].append(current_block) # type: ignore[union-attr]
current_block = None
elif isinstance(response, RawMessageDeltaEvent):
if (usage := response.usage) is not None:
chat_log.async_trace(_create_token_stats(input_usage, usage))
if response.delta.stop_reason == "refusal":
raise HomeAssistantError("Potential policy violation detected")
elif isinstance(response, RawMessageStopEvent):
if current_message is not None:
messages.append(current_message)
current_message = None
def _create_token_stats(
input_usage: Usage | None, response_usage: MessageDeltaUsage
) -> dict[str, Any]:
"""Create token stats for conversation agent tracing."""
input_tokens = 0
cached_input_tokens = 0
if input_usage:
input_tokens = input_usage.input_tokens
cached_input_tokens = input_usage.cache_creation_input_tokens or 0
output_tokens = response_usage.output_tokens
return {
"stats": {
"input_tokens": input_tokens,
"cached_input_tokens": cached_input_tokens,
"output_tokens": output_tokens,
}
}
class AnthropicConversationEntity(
conversation.ConversationEntity, conversation.AbstractConversationAgent
):
@@ -328,7 +298,6 @@ class AnthropicConversationEntity(
_attr_has_entity_name = True
_attr_name = None
_attr_supports_streaming = True
def __init__(self, entry: AnthropicConfigEntry) -> None:
"""Initialize the agent."""
@@ -424,8 +393,7 @@ class AnthropicConversationEntity(
[
content
async for content in chat_log.async_add_delta_content_stream(
user_input.agent_id,
_transform_stream(chat_log, stream, messages),
user_input.agent_id, _transform_stream(stream, messages)
)
if not isinstance(content, conversation.AssistantContent)
]

View File

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.52.0"]
"requirements": ["anthropic==0.47.2"]
}

View File

@@ -46,7 +46,11 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_show_form(step_id="user", data_schema=_SCHEMA)
host, port = user_input[CONF_HOST], user_input[CONF_PORT]
# Abort if an entry with same host and port is present.
self._async_abort_entries_match({CONF_HOST: host, CONF_PORT: port})
# Test the connection to the host and get the current status for the serial number.
try:
async with asyncio.timeout(CONNECTION_TIMEOUT):
data = APCUPSdData(await aioapcaccess.request_status(host, port))
@@ -63,30 +67,3 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN):
title = data.name or data.model or data.serial_no or "APC UPS"
return self.async_create_entry(title=title, data=user_input)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of an existing entry."""
if user_input is None:
return self.async_show_form(step_id="reconfigure", data_schema=_SCHEMA)
host, port = user_input[CONF_HOST], user_input[CONF_PORT]
self._async_abort_entries_match({CONF_HOST: host, CONF_PORT: port})
try:
async with asyncio.timeout(CONNECTION_TIMEOUT):
data = APCUPSdData(await aioapcaccess.request_status(host, port))
except (OSError, asyncio.IncompleteReadError, TimeoutError):
errors = {"base": "cannot_connect"}
return self.async_show_form(
step_id="reconfigure", data_schema=_SCHEMA, errors=errors
)
await self.async_set_unique_id(data.serial_no)
self._abort_if_unique_id_mismatch(reason="wrong_apcupsd_daemon")
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data_updates=user_input,
)

View File

@@ -113,7 +113,4 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
data = await aioapcaccess.request_status(self._host, self._port)
return APCUPSdData(data)
except (OSError, asyncio.IncompleteReadError) as error:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from error
raise UpdateFailed(error) from error

View File

@@ -1,9 +1,7 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"wrong_apcupsd_daemon": "The reconfigured APC UPS Daemon is not the same as the one already configured.",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
@@ -95,7 +93,7 @@
"name": "Internal temperature"
},
"last_self_test": {
"name": "Last self-test"
"name": "Last self test"
},
"last_transfer": {
"name": "Last transfer"
@@ -179,7 +177,7 @@
"name": "Restore requirement"
},
"self_test_result": {
"name": "Self-test result"
"name": "Self test result"
},
"sensitivity": {
"name": "Sensitivity"
@@ -197,7 +195,7 @@
"name": "Status"
},
"self_test_interval": {
"name": "Self-test interval"
"name": "Self test interval"
},
"time_left": {
"name": "Time left"
@@ -221,10 +219,5 @@
"name": "Transfer to battery"
}
}
},
"exceptions": {
"cannot_connect": {
"message": "Cannot connect to APC UPS Daemon."
}
}
}

View File

@@ -62,8 +62,6 @@ async def async_setup_entry(
target_humidity_key=Attribute.HUMIDIFICATION_SETPOINT,
min_humidity=10,
max_humidity=50,
auto_status_key=Attribute.HUMIDIFICATION_AVAILABLE,
auto_status_value=1,
default_humidity=30,
set_humidity_fn=coordinator.client.set_humidification_setpoint,
)
@@ -79,8 +77,6 @@ async def async_setup_entry(
action_map=DEHUMIDIFIER_ACTION_MAP,
current_humidity_key=Attribute.INDOOR_HUMIDITY_CONTROLLING_SENSOR_VALUE,
target_humidity_key=Attribute.DEHUMIDIFICATION_SETPOINT,
auto_status_key=None,
auto_status_value=None,
min_humidity=40,
max_humidity=90,
default_humidity=60,
@@ -104,8 +100,6 @@ class AprilaireHumidifierDescription(HumidifierEntityDescription):
target_humidity_key: str
min_humidity: int
max_humidity: int
auto_status_key: str | None
auto_status_value: int | None
default_humidity: int
set_humidity_fn: Callable[[int], Awaitable]
@@ -169,31 +163,14 @@ class AprilaireHumidifierEntity(BaseAprilaireEntity, HumidifierEntity):
def min_humidity(self) -> float:
"""Return the minimum humidity."""
if self.is_auto_humidity_mode():
return 1
return self.entity_description.min_humidity
@property
def max_humidity(self) -> float:
"""Return the maximum humidity."""
if self.is_auto_humidity_mode():
return 7
return self.entity_description.max_humidity
def is_auto_humidity_mode(self) -> bool:
"""Return whether the humidifier is in auto mode."""
if self.entity_description.auto_status_key is None:
return False
return (
self.coordinator.data.get(self.entity_description.auto_status_key)
== self.entity_description.auto_status_value
)
async def async_set_humidity(self, humidity: int) -> None:
"""Set the humidity."""

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["pyaprilaire"],
"requirements": ["pyaprilaire==0.9.0"]
"requirements": ["pyaprilaire==0.8.1"]
}

View File

@@ -6,6 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/apsystems",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["APsystemsEZ1"],
"requirements": ["apsystems-ez1==2.6.0"]
"requirements": ["apsystems-ez1==2.5.0"]
}

View File

@@ -1,9 +1,6 @@
{
"entity": {
"sensor": {
"last_update": {
"default": "mdi:update"
},
"salt_left_side_percentage": {
"default": "mdi:basket-fill"
},

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from aioaquacell import Softener
@@ -29,7 +28,7 @@ PARALLEL_UPDATES = 1
class SoftenerSensorEntityDescription(SensorEntityDescription):
"""Describes Softener sensor entity."""
value_fn: Callable[[Softener], StateType | datetime]
value_fn: Callable[[Softener], StateType]
SENSORS: tuple[SoftenerSensorEntityDescription, ...] = (
@@ -78,12 +77,6 @@ SENSORS: tuple[SoftenerSensorEntityDescription, ...] = (
"low",
],
),
SoftenerSensorEntityDescription(
key="last_update",
translation_key="last_update",
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda softener: softener.lastUpdate,
),
)
@@ -118,6 +111,6 @@ class SoftenerSensor(AquacellEntity, SensorEntity):
self.entity_description = description
@property
def native_value(self) -> StateType | datetime:
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.softener)

View File

@@ -21,9 +21,6 @@
},
"entity": {
"sensor": {
"last_update": {
"name": "Last update"
},
"salt_left_side_percentage": {
"name": "Salt left side percentage"
},

View File

@@ -20,6 +20,9 @@ import hass_nabucasa
import voluptuous as vol
from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
from homeassistant.components.tts import (
generate_media_source_id as tts_generate_media_source_id,
)
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
@@ -89,8 +92,6 @@ KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
"pipeline_conversation_data"
)
# Number of response parts to handle before streaming the response
STREAM_RESPONSE_CHARS = 60
def validate_language(data: dict[str, Any]) -> Any:
@@ -554,7 +555,7 @@ class PipelineRun:
event_callback: PipelineEventCallback
language: str = None # type: ignore[assignment]
runner_data: Any | None = None
intent_agent: conversation.AgentInfo | None = None
intent_agent: str | None = None
tts_audio_output: str | dict[str, Any] | None = None
wake_word_settings: WakeWordSettings | None = None
audio_settings: AudioSettings = field(default_factory=AudioSettings)
@@ -590,9 +591,6 @@ class PipelineRun:
_intent_agent_only = False
"""If request should only be handled by agent, ignoring sentence triggers and local processing."""
_streamed_response_text = False
"""If the conversation agent streamed response text to TTS result."""
def __post_init__(self) -> None:
"""Set language for pipeline."""
self.language = self.pipeline.language or self.hass.config.language
@@ -654,11 +652,6 @@ class PipelineRun:
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,
"stream_response": (
self.tts_stream.supports_streaming_input
and self.intent_agent
and self.intent_agent.supports_streaming
),
}
self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))
@@ -906,12 +899,12 @@ class PipelineRun:
) -> str:
"""Run speech-to-text portion of pipeline. Returns the spoken text."""
# Create a background task to prepare the conversation agent
if self.end_stage >= PipelineStage.INTENT and self.intent_agent:
if self.end_stage >= PipelineStage.INTENT:
self.hass.async_create_background_task(
conversation.async_prepare_agent(
self.hass, self.intent_agent.id, self.language
self.hass, self.intent_agent, self.language
),
f"prepare conversation agent {self.intent_agent.id}",
f"prepare conversation agent {self.intent_agent}",
)
if isinstance(self.stt_provider, stt.Provider):
@@ -1052,7 +1045,7 @@ class PipelineRun:
message=f"Intent recognition engine {engine} is not found",
)
self.intent_agent = agent_info
self.intent_agent = agent_info.id
async def recognize_intent(
self,
@@ -1085,7 +1078,7 @@ class PipelineRun:
PipelineEvent(
PipelineEventType.INTENT_START,
{
"engine": self.intent_agent.id,
"engine": self.intent_agent,
"language": input_language,
"intent_input": intent_input,
"conversation_id": conversation_id,
@@ -1102,11 +1095,11 @@ class PipelineRun:
conversation_id=conversation_id,
device_id=device_id,
language=input_language,
agent_id=self.intent_agent.id,
agent_id=self.intent_agent,
extra_system_prompt=conversation_extra_system_prompt,
)
agent_id = self.intent_agent.id
agent_id = self.intent_agent
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
intent_response: intent.IntentResponse | None = None
if not processed_locally and not self._intent_agent_only:
@@ -1128,7 +1121,7 @@ class PipelineRun:
# If the LLM has API access, we filter out some sentences that are
# interfering with LLM operation.
if (
intent_agent_state := self.hass.states.get(self.intent_agent.id)
intent_agent_state := self.hass.states.get(self.intent_agent)
) and intent_agent_state.attributes.get(
ATTR_SUPPORTED_FEATURES, 0
) & conversation.ConversationEntityFeature.CONTROL:
@@ -1150,13 +1143,6 @@ class PipelineRun:
agent_id = conversation.HOME_ASSISTANT_AGENT
processed_locally = True
if self.tts_stream and self.tts_stream.supports_streaming_input:
tts_input_stream: asyncio.Queue[str | None] | None = asyncio.Queue()
else:
tts_input_stream = None
chat_log_role = None
delta_character_count = 0
@callback
def chat_log_delta_listener(
chat_log: conversation.ChatLog, delta: dict
@@ -1170,61 +1156,6 @@ class PipelineRun:
},
)
)
if tts_input_stream is None:
return
nonlocal chat_log_role
if role := delta.get("role"):
chat_log_role = role
# We are only interested in assistant deltas
if chat_log_role != "assistant":
return
if content := delta.get("content"):
tts_input_stream.put_nowait(content)
if self._streamed_response_text:
return
nonlocal delta_character_count
# Streamed responses are not cached. That's why we only start streaming text after
# we have received enough characters to indicate it will be a long response,
# or if we have received text followed by a tool call.
# Tool call after we already received text
start_streaming = delta_character_count > 0 and delta.get("tool_calls")
# Count characters in the content and test if we exceed streaming threshold
if not start_streaming and content:
delta_character_count += len(content)
start_streaming = delta_character_count > STREAM_RESPONSE_CHARS
if not start_streaming:
return
self._streamed_response_text = True
async def tts_input_stream_generator() -> AsyncGenerator[str]:
"""Yield TTS input stream."""
while (tts_input := await tts_input_stream.get()) is not None:
yield tts_input
# Concatenate all existing queue items
parts = []
while not tts_input_stream.empty():
parts.append(tts_input_stream.get_nowait())
tts_input_stream.put_nowait(
"".join(
# At this point parts is only strings, None indicates end of queue
cast(list[str], parts)
)
)
assert self.tts_stream is not None
self.tts_stream.async_set_message_stream(tts_input_stream_generator())
with (
chat_session.async_get_chat_session(
@@ -1268,8 +1199,6 @@ class PipelineRun:
speech = conversation_result.response.speech.get("plain", {}).get(
"speech", ""
)
if tts_input_stream and self._streamed_response_text:
tts_input_stream.put_nowait(None)
except Exception as src_error:
_LOGGER.exception("Unexpected error during intent recognition")
@@ -1347,11 +1276,26 @@ class PipelineRun:
)
)
if not self._streamed_response_text:
self.tts_stream.async_set_message(tts_input)
try:
# Synthesize audio and get URL
tts_media_id = tts_generate_media_source_id(
self.hass,
tts_input,
engine=self.tts_stream.engine,
language=self.tts_stream.language,
options=self.tts_stream.options,
)
except Exception as src_error:
_LOGGER.exception("Unexpected error during text-to-speech")
raise TextToSpeechError(
code="tts-failed",
message="Unexpected error during text-to-speech",
) from src_error
self.tts_stream.async_set_message(tts_input)
tts_output = {
"media_id": self.tts_stream.media_source_id,
"media_id": tts_media_id,
"token": self.tts_stream.token,
"url": self.tts_stream.url,
"mime_type": self.tts_stream.content_type,

View File

@@ -18,7 +18,7 @@
},
"step": {
"validation": {
"title": "Two-factor authentication",
"title": "Two factor authentication",
"data": {
"verification_code": "Verification code"
},

View File

@@ -4,8 +4,8 @@
"user": {
"description": "The inverter must be connected via an RS485 adaptor, please select serial port and the inverter's address as configured on the LCD panel",
"data": {
"port": "RS485 or USB-RS485 adaptor port",
"address": "Inverter address"
"port": "RS485 or USB-RS485 Adaptor Port",
"address": "Inverter Address"
}
}
},
@@ -16,7 +16,7 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"no_serial_ports": "No com ports found. The integration needs a valid RS485 device to communicate."
"no_serial_ports": "No com ports found. Need a valid RS485 device to communicate."
}
},
"entity": {

View File

@@ -5,7 +5,7 @@
"step": {
"init": {
"title": "Set up two-factor authentication using TOTP",
"description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator](https://support.google.com/accounts/answer/1066447) or [Authy](https://authy.com/).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**."
"description": "To activate two factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator](https://support.google.com/accounts/answer/1066447) or [Authy](https://authy.com/).\n\n{qr_code}\n\nAfter scanning the code, enter the six digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**."
}
},
"error": {
@@ -13,7 +13,7 @@
}
},
"notify": {
"title": "Notify one-time password",
"title": "Notify One-Time Password",
"step": {
"init": {
"title": "Set up one-time password delivered by notify component",

View File

@@ -18,7 +18,6 @@ from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
ATTR_NAME,
CONF_ACTIONS,
CONF_ALIAS,
CONF_CONDITIONS,
CONF_DEVICE_ID,
@@ -28,7 +27,6 @@ from homeassistant.const import (
CONF_MODE,
CONF_PATH,
CONF_PLATFORM,
CONF_TRIGGERS,
CONF_VARIABLES,
CONF_ZONE,
EVENT_HOMEASSISTANT_STARTED,
@@ -88,9 +86,11 @@ from homeassistant.util.hass_dict import HassKey
from .config import AutomationConfig, ValidationStatus
from .const import (
CONF_ACTIONS,
CONF_INITIAL_STATE,
CONF_TRACE,
CONF_TRIGGER_VARIABLES,
CONF_TRIGGERS,
DEFAULT_INITIAL_STATE,
DOMAIN,
LOGGER,

View File

@@ -14,15 +14,11 @@ from homeassistant.components import blueprint
from homeassistant.components.trace import TRACE_CONFIG_SCHEMA
from homeassistant.config import config_per_platform, config_without_domain
from homeassistant.const import (
CONF_ACTION,
CONF_ACTIONS,
CONF_ALIAS,
CONF_CONDITION,
CONF_CONDITIONS,
CONF_DESCRIPTION,
CONF_ID,
CONF_TRIGGER,
CONF_TRIGGERS,
CONF_VARIABLES,
)
from homeassistant.core import HomeAssistant
@@ -34,10 +30,14 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.util.yaml.input import UndefinedSubstitution
from .const import (
CONF_ACTION,
CONF_ACTIONS,
CONF_HIDE_ENTITY,
CONF_INITIAL_STATE,
CONF_TRACE,
CONF_TRIGGER,
CONF_TRIGGER_VARIABLES,
CONF_TRIGGERS,
DOMAIN,
LOGGER,
)
@@ -58,9 +58,34 @@ _MINIMAL_PLATFORM_SCHEMA = vol.Schema(
def _backward_compat_schema(value: Any | None) -> Any:
"""Backward compatibility for automations."""
value = cv.renamed(CONF_TRIGGER, CONF_TRIGGERS)(value)
value = cv.renamed(CONF_ACTION, CONF_ACTIONS)(value)
return cv.renamed(CONF_CONDITION, CONF_CONDITIONS)(value)
if not isinstance(value, dict):
return value
# `trigger` has been renamed to `triggers`
if CONF_TRIGGER in value:
if CONF_TRIGGERS in value:
raise vol.Invalid(
"Cannot specify both 'trigger' and 'triggers'. Please use 'triggers' only."
)
value[CONF_TRIGGERS] = value.pop(CONF_TRIGGER)
# `condition` has been renamed to `conditions`
if CONF_CONDITION in value:
if CONF_CONDITIONS in value:
raise vol.Invalid(
"Cannot specify both 'condition' and 'conditions'. Please use 'conditions' only."
)
value[CONF_CONDITIONS] = value.pop(CONF_CONDITION)
# `action` has been renamed to `actions`
if CONF_ACTION in value:
if CONF_ACTIONS in value:
raise vol.Invalid(
"Cannot specify both 'action' and 'actions'. Please use 'actions' only."
)
value[CONF_ACTIONS] = value.pop(CONF_ACTION)
return value
PLATFORM_SCHEMA = vol.All(
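A quick worked example of the rename handling above, which behaves the same under either implementation shown (cv.renamed also rejects mixing old and new keys):

cfg = {"trigger": [], "condition": [], "action": []}
cfg = _backward_compat_schema(cfg)
assert set(cfg) == {"triggers", "conditions", "actions"}

# Mixing singular and plural forms raises vol.Invalid:
# _backward_compat_schema({"trigger": [], "triggers": []})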

View File

@@ -2,6 +2,10 @@
import logging
CONF_ACTION = "action"
CONF_ACTIONS = "actions"
CONF_TRIGGER = "trigger"
CONF_TRIGGERS = "triggers"
CONF_TRIGGER_VARIABLES = "trigger_variables"
DOMAIN = "automation"

View File

@@ -1,82 +0,0 @@
"""The AWS S3 integration."""
from __future__ import annotations
import logging
from typing import cast
from aiobotocore.client import AioBaseClient as S3Client
from aiobotocore.session import AioSession
from botocore.exceptions import ClientError, ConnectionError, ParamValidationError
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from .const import (
CONF_ACCESS_KEY_ID,
CONF_BUCKET,
CONF_ENDPOINT_URL,
CONF_SECRET_ACCESS_KEY,
DATA_BACKUP_AGENT_LISTENERS,
DOMAIN,
)
type S3ConfigEntry = ConfigEntry[S3Client]
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, entry: S3ConfigEntry) -> bool:
"""Set up S3 from a config entry."""
data = cast(dict, entry.data)
try:
session = AioSession()
# pylint: disable-next=unnecessary-dunder-call
client = await session.create_client(
"s3",
endpoint_url=data.get(CONF_ENDPOINT_URL),
aws_secret_access_key=data[CONF_SECRET_ACCESS_KEY],
aws_access_key_id=data[CONF_ACCESS_KEY_ID],
).__aenter__()
await client.head_bucket(Bucket=data[CONF_BUCKET])
except ClientError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="invalid_credentials",
) from err
except ParamValidationError as err:
if "Invalid bucket name" in str(err):
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="invalid_bucket_name",
) from err
except ValueError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="invalid_endpoint_url",
) from err
except ConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from err
entry.runtime_data = client
def notify_backup_listeners() -> None:
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
listener()
entry.async_on_unload(entry.async_on_state_change(notify_backup_listeners))
return True
async def async_unload_entry(hass: HomeAssistant, entry: S3ConfigEntry) -> bool:
"""Unload a config entry."""
client = entry.runtime_data
await client.__aexit__(None, None, None)
return True
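One detail worth noting above: session.create_client returns an async context manager, and setup enters it manually with __aenter__ so the client outlives async_setup_entry; the matching __aexit__ is deferred to async_unload_entry. The same pattern in isolation, with a generic resource and only the standard library:

import asyncio
from contextlib import asynccontextmanager

@asynccontextmanager
async def open_resource():
    print("opened")
    try:
        yield "client"
    finally:
        print("closed")

async def main() -> None:
    cm = open_resource()
    client = await cm.__aenter__()        # entered manually, held open
    print(f"using {client} across the entry's lifetime")
    await cm.__aexit__(None, None, None)  # mirrors async_unload_entry

asyncio.run(main())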

View File

@@ -1,330 +0,0 @@
"""Backup platform for the AWS S3 integration."""
from collections.abc import AsyncIterator, Callable, Coroutine
import functools
import json
import logging
from time import time
from typing import Any
from botocore.exceptions import BotoCoreError
from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupAgentError,
BackupNotFound,
suggested_filename,
)
from homeassistant.core import HomeAssistant, callback
from . import S3ConfigEntry
from .const import CONF_BUCKET, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
_LOGGER = logging.getLogger(__name__)
CACHE_TTL = 300
# S3 part size requirements: 5 MiB to 5 GiB per part
# https://docs.aws.amazon.com/AmazonS3/latest/userguide/qfacts.html
# We set the threshold to 20 MiB to avoid too many parts.
# Note that each part is held in memory.
MULTIPART_MIN_PART_SIZE_BYTES = 20 * 2**20
def handle_boto_errors[T](
func: Callable[..., Coroutine[Any, Any, T]],
) -> Callable[..., Coroutine[Any, Any, T]]:
"""Handle BotoCoreError exceptions by converting them to BackupAgentError."""
@functools.wraps(func)
async def wrapper(*args: Any, **kwargs: Any) -> T:
"""Catch BotoCoreError and raise BackupAgentError."""
try:
return await func(*args, **kwargs)
except BotoCoreError as err:
error_msg = f"Failed during {func.__name__}"
raise BackupAgentError(error_msg) from err
return wrapper
async def async_get_backup_agents(
hass: HomeAssistant,
) -> list[BackupAgent]:
"""Return a list of backup agents."""
entries: list[S3ConfigEntry] = hass.config_entries.async_loaded_entries(DOMAIN)
return [S3BackupAgent(hass, entry) for entry in entries]
@callback
def async_register_backup_agents_listener(
hass: HomeAssistant,
*,
listener: Callable[[], None],
**kwargs: Any,
) -> Callable[[], None]:
"""Register a listener to be called when agents are added or removed.
:return: A function to unregister the listener.
"""
hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)
@callback
def remove_listener() -> None:
"""Remove the listener."""
hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
del hass.data[DATA_BACKUP_AGENT_LISTENERS]
return remove_listener
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
"""Return the suggested filenames for the backup and metadata files."""
base_name = suggested_filename(backup).rsplit(".", 1)[0]
return f"{base_name}.tar", f"{base_name}.metadata.json"
class S3BackupAgent(BackupAgent):
"""Backup agent for the S3 integration."""
domain = DOMAIN
def __init__(self, hass: HomeAssistant, entry: S3ConfigEntry) -> None:
"""Initialize the S3 agent."""
super().__init__()
self._client = entry.runtime_data
self._bucket: str = entry.data[CONF_BUCKET]
self.name = entry.title
self.unique_id = entry.entry_id
self._backup_cache: dict[str, AgentBackup] = {}
self._cache_expiration = time()
@handle_boto_errors
async def async_download_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AsyncIterator[bytes]:
"""Download a backup file.
:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""
backup = await self._find_backup_by_id(backup_id)
tar_filename, _ = suggested_filenames(backup)
response = await self._client.get_object(Bucket=self._bucket, Key=tar_filename)
return response["Body"].iter_chunks()
async def async_upload_backup(
self,
*,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
backup: AgentBackup,
**kwargs: Any,
) -> None:
"""Upload a backup.
:param open_stream: A function returning an async iterator that yields bytes.
:param backup: Metadata about the backup that should be uploaded.
"""
tar_filename, metadata_filename = suggested_filenames(backup)
try:
if backup.size < MULTIPART_MIN_PART_SIZE_BYTES:
await self._upload_simple(tar_filename, open_stream)
else:
await self._upload_multipart(tar_filename, open_stream)
# Upload the metadata file
metadata_content = json.dumps(backup.as_dict())
await self._client.put_object(
Bucket=self._bucket,
Key=metadata_filename,
Body=metadata_content,
)
except BotoCoreError as err:
raise BackupAgentError("Failed to upload backup") from err
else:
# Reset cache after successful upload
self._cache_expiration = time()
async def _upload_simple(
self,
tar_filename: str,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
) -> None:
"""Upload a small file using simple upload.
:param tar_filename: The target filename for the backup.
:param open_stream: A function returning an async iterator that yields bytes.
"""
_LOGGER.debug("Starting simple upload for %s", tar_filename)
stream = await open_stream()
file_data = bytearray()
async for chunk in stream:
file_data.extend(chunk)
await self._client.put_object(
Bucket=self._bucket,
Key=tar_filename,
Body=bytes(file_data),
)
async def _upload_multipart(
self,
tar_filename: str,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
):
"""Upload a large file using multipart upload.
:param tar_filename: The target filename for the backup.
:param open_stream: A function returning an async iterator that yields bytes.
"""
_LOGGER.debug("Starting multipart upload for %s", tar_filename)
multipart_upload = await self._client.create_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
)
upload_id = multipart_upload["UploadId"]
try:
parts = []
part_number = 1
buffer_size = 0 # bytes
buffer: list[bytes] = []
stream = await open_stream()
async for chunk in stream:
buffer_size += len(chunk)
buffer.append(chunk)
# If buffer size meets minimum part size, upload it as a part
if buffer_size >= MULTIPART_MIN_PART_SIZE_BYTES:
_LOGGER.debug(
"Uploading part number %d, size %d", part_number, buffer_size
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
buffer_size = 0
buffer = []
# Upload the final buffer as the last part (no minimum size requirement)
if buffer:
_LOGGER.debug(
"Uploading final part number %d, size %d", part_number, buffer_size
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
await self._client.complete_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
UploadId=upload_id,
MultipartUpload={"Parts": parts},
)
except BotoCoreError:
try:
await self._client.abort_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
UploadId=upload_id,
)
except BotoCoreError:
_LOGGER.exception("Failed to abort multipart upload")
raise
@handle_boto_errors
async def async_delete_backup(
self,
backup_id: str,
**kwargs: Any,
) -> None:
"""Delete a backup file.
:param backup_id: The ID of the backup that was returned in async_list_backups.
"""
backup = await self._find_backup_by_id(backup_id)
tar_filename, metadata_filename = suggested_filenames(backup)
# Delete both the backup file and its metadata file
await self._client.delete_object(Bucket=self._bucket, Key=tar_filename)
await self._client.delete_object(Bucket=self._bucket, Key=metadata_filename)
# Reset cache after successful deletion
self._cache_expiration = time()
@handle_boto_errors
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
backups = await self._list_backups()
return list(backups.values())
@handle_boto_errors
async def async_get_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup:
"""Return a backup."""
return await self._find_backup_by_id(backup_id)
async def _find_backup_by_id(self, backup_id: str) -> AgentBackup:
"""Find a backup by its backup ID."""
backups = await self._list_backups()
if backup := backups.get(backup_id):
return backup
raise BackupNotFound(f"Backup {backup_id} not found")
async def _list_backups(self) -> dict[str, AgentBackup]:
"""List backups, using a cache if possible."""
if time() <= self._cache_expiration:
return self._backup_cache
backups = {}
response = await self._client.list_objects_v2(Bucket=self._bucket)
# Filter for metadata files only
metadata_files = [
obj
for obj in response.get("Contents", [])
if obj["Key"].endswith(".metadata.json")
]
for metadata_file in metadata_files:
try:
# Download and parse metadata file
metadata_response = await self._client.get_object(
Bucket=self._bucket, Key=metadata_file["Key"]
)
metadata_content = await metadata_response["Body"].read()
metadata_json = json.loads(metadata_content)
except (BotoCoreError, json.JSONDecodeError) as err:
_LOGGER.warning(
"Failed to process metadata file %s: %s",
metadata_file["Key"],
err,
)
continue
backup = AgentBackup.from_dict(metadata_json)
backups[backup.backup_id] = backup
self._backup_cache = backups
self._cache_expiration = time() + CACHE_TTL
return self._backup_cache
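To make the multipart buffering above concrete: streamed chunks accumulate until they reach the 20 MiB floor, each full buffer becomes one uploaded part, and any leftover is sent as a final part with no minimum size. A toy model of that grouping with small stand-in numbers:

MIN_PART = 20  # stands in for MULTIPART_MIN_PART_SIZE_BYTES

def group_into_parts(chunk_sizes: list[int]) -> list[int]:
    """Group streamed chunk sizes into parts; the last part is exempt from the floor."""
    parts: list[int] = []
    buffer = 0
    for size in chunk_sizes:
        buffer += size
        if buffer >= MIN_PART:
            parts.append(buffer)
            buffer = 0
    if buffer:
        parts.append(buffer)
    return parts

print(group_into_parts([8, 8, 8, 8, 8]))  # [24, 16]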

View File

@@ -1,101 +0,0 @@
"""Config flow for the AWS S3 integration."""
from __future__ import annotations
from typing import Any
from urllib.parse import urlparse
from aiobotocore.session import AioSession
from botocore.exceptions import ClientError, ConnectionError, ParamValidationError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import (
AWS_DOMAIN,
CONF_ACCESS_KEY_ID,
CONF_BUCKET,
CONF_ENDPOINT_URL,
CONF_SECRET_ACCESS_KEY,
DEFAULT_ENDPOINT_URL,
DESCRIPTION_AWS_S3_DOCS_URL,
DESCRIPTION_BOTO3_DOCS_URL,
DOMAIN,
)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_ACCESS_KEY_ID): cv.string,
vol.Required(CONF_SECRET_ACCESS_KEY): TextSelector(
config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
vol.Required(CONF_BUCKET): cv.string,
vol.Required(CONF_ENDPOINT_URL, default=DEFAULT_ENDPOINT_URL): TextSelector(
config=TextSelectorConfig(type=TextSelectorType.URL)
),
}
)
class S3ConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initiated by the user."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match(
{
CONF_BUCKET: user_input[CONF_BUCKET],
CONF_ENDPOINT_URL: user_input[CONF_ENDPOINT_URL],
}
)
if not urlparse(user_input[CONF_ENDPOINT_URL]).hostname.endswith(
AWS_DOMAIN
):
errors[CONF_ENDPOINT_URL] = "invalid_endpoint_url"
else:
try:
session = AioSession()
async with session.create_client(
"s3",
endpoint_url=user_input.get(CONF_ENDPOINT_URL),
aws_secret_access_key=user_input[CONF_SECRET_ACCESS_KEY],
aws_access_key_id=user_input[CONF_ACCESS_KEY_ID],
) as client:
await client.head_bucket(Bucket=user_input[CONF_BUCKET])
except ClientError:
errors["base"] = "invalid_credentials"
except ParamValidationError as err:
if "Invalid bucket name" in str(err):
errors[CONF_BUCKET] = "invalid_bucket_name"
except ValueError:
errors[CONF_ENDPOINT_URL] = "invalid_endpoint_url"
except ConnectionError:
errors[CONF_ENDPOINT_URL] = "cannot_connect"
else:
return self.async_create_entry(
title=user_input[CONF_BUCKET], data=user_input
)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
STEP_USER_DATA_SCHEMA, user_input
),
errors=errors,
description_placeholders={
"aws_s3_docs_url": DESCRIPTION_AWS_S3_DOCS_URL,
"boto3_docs_url": DESCRIPTION_BOTO3_DOCS_URL,
},
)
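
The flow above validates credentials by issuing a HEAD request against the bucket before creating the entry. A reduced sketch of just that probe, outside the config-flow machinery (all arguments are placeholders):

from aiobotocore.session import AioSession
from botocore.exceptions import ClientError


async def can_access_bucket(
    endpoint_url: str, access_key_id: str, secret_access_key: str, bucket: str
) -> bool:
    """Return True if the bucket exists and the credentials can reach it."""
    session = AioSession()
    async with session.create_client(
        "s3",
        endpoint_url=endpoint_url,
        aws_access_key_id=access_key_id,
        aws_secret_access_key=secret_access_key,
    ) as client:
        try:
            await client.head_bucket(Bucket=bucket)
        except ClientError:
            return False
    return True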

View File

@@ -1,23 +0,0 @@
"""Constants for the AWS S3 integration."""
from collections.abc import Callable
from typing import Final
from homeassistant.util.hass_dict import HassKey
DOMAIN: Final = "aws_s3"
CONF_ACCESS_KEY_ID = "access_key_id"
CONF_SECRET_ACCESS_KEY = "secret_access_key"
CONF_ENDPOINT_URL = "endpoint_url"
CONF_BUCKET = "bucket"
AWS_DOMAIN = "amazonaws.com"
DEFAULT_ENDPOINT_URL = f"https://s3.eu-central-1.{AWS_DOMAIN}/"
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
f"{DOMAIN}.backup_agent_listeners"
)
DESCRIPTION_AWS_S3_DOCS_URL = "https://docs.aws.amazon.com/general/latest/gr/s3.html"
DESCRIPTION_BOTO3_DOCS_URL = "https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html"

View File

@@ -1,12 +0,0 @@
{
"domain": "aws_s3",
"name": "AWS S3",
"codeowners": ["@tomasbedrich"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aws_s3",
"integration_type": "service",
"iot_class": "cloud_push",
"loggers": ["aiobotocore"],
"quality_scale": "bronze",
"requirements": ["aiobotocore==2.21.1"]
}

View File

@@ -1,112 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: Integration does not register custom actions.
appropriate-polling:
status: exempt
comment: This integration does not poll.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: This integration does not have any custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
    comment: Entities of this integration do not explicitly subscribe to events.
entity-unique-id:
status: exempt
comment: This integration does not have entities.
has-entity-name:
status: exempt
comment: This integration does not have entities.
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: Integration does not register custom actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: This integration does not have an options flow.
docs-installation-parameters: done
entity-unavailable:
status: exempt
comment: This integration does not have entities.
integration-owner: done
log-when-unavailable: todo
parallel-updates:
status: exempt
comment: This integration does not poll.
reauthentication-flow: todo
test-coverage: done
# Gold
devices:
status: exempt
comment: This integration does not have entities.
diagnostics: todo
discovery-update-info:
status: exempt
comment: S3 is a cloud service that is not discovered on the network.
discovery:
status: exempt
comment: S3 is a cloud service that is not discovered on the network.
docs-data-update:
status: exempt
comment: This integration does not poll.
docs-examples:
status: exempt
comment: The integration extends core functionality and does not require examples.
docs-known-limitations:
status: exempt
comment: No known limitations.
docs-supported-devices:
status: exempt
comment: This integration does not support physical devices.
docs-supported-functions: done
docs-troubleshooting:
status: exempt
    comment: No troubleshooting instructions are available beyond what is already included in strings.json.
docs-use-cases: done
dynamic-devices:
status: exempt
comment: This integration does not have devices.
entity-category:
status: exempt
comment: This integration does not have entities.
entity-device-class:
status: exempt
comment: This integration does not have entities.
entity-disabled-by-default:
status: exempt
comment: This integration does not have entities.
entity-translations:
status: exempt
comment: This integration does not have entities.
exception-translations: done
icon-translations:
status: exempt
comment: This integration does not use icons.
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: There are no issues which can be repaired.
stale-devices:
status: exempt
comment: This integration does not have devices.
# Platinum
async-dependency: done
inject-websession: todo
strict-typing: todo

View File

@@ -1,41 +0,0 @@
{
"config": {
"step": {
"user": {
"data": {
"access_key_id": "Access key ID",
"secret_access_key": "Secret access key",
"bucket": "Bucket name",
"endpoint_url": "Endpoint URL"
},
"data_description": {
"access_key_id": "Access key ID to connect to AWS S3 API",
"secret_access_key": "Secret access key to connect to AWS S3 API",
"bucket": "Bucket must already exist and be writable by the provided credentials.",
"endpoint_url": "Endpoint URL provided to [Boto3 Session]({boto3_docs_url}). Region-specific [AWS S3 endpoints]({aws_s3_docs_url}) are available in their docs."
},
"title": "Add AWS S3 bucket"
}
},
"error": {
"cannot_connect": "[%key:component::aws_s3::exceptions::cannot_connect::message%]",
"invalid_bucket_name": "[%key:component::aws_s3::exceptions::invalid_bucket_name::message%]",
"invalid_credentials": "[%key:component::aws_s3::exceptions::invalid_credentials::message%]",
"invalid_endpoint_url": "Invalid endpoint URL. Please make sure it's a valid AWS S3 endpoint URL."
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"exceptions": {
"cannot_connect": {
"message": "Cannot connect to endpoint"
},
"invalid_bucket_name": {
"message": "Invalid bucket name"
},
"invalid_credentials": {
"message": "Bucket cannot be accessed using provided combination of access key ID and secret access key."
}
}
}

View File

@@ -47,7 +47,7 @@ from .const import (
CONF_VIDEO_SOURCE,
DEFAULT_STREAM_PROFILE,
DEFAULT_VIDEO_SOURCE,
DOMAIN,
DOMAIN as AXIS_DOMAIN,
)
from .errors import AuthenticationRequired, CannotConnect
from .hub import AxisHub, get_axis_api
@@ -58,7 +58,7 @@ DEFAULT_PROTOCOL = "https"
PROTOCOL_CHOICES = ["https", "http"]
class AxisFlowHandler(ConfigFlow, domain=DOMAIN):
class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
"""Handle a Axis config flow."""
VERSION = 3
@@ -146,7 +146,7 @@ class AxisFlowHandler(ConfigFlow, domain=DOMAIN):
model = self.config[CONF_MODEL]
same_model = [
entry.data[CONF_NAME]
for entry in self.hass.config_entries.async_entries(DOMAIN)
for entry in self.hass.config_entries.async_entries(AXIS_DOMAIN)
if entry.source != SOURCE_IGNORE and entry.data[CONF_MODEL] == model
]

View File

@@ -14,7 +14,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity, EntityDescription
from .const import DOMAIN
from .const import DOMAIN as AXIS_DOMAIN
if TYPE_CHECKING:
from .hub import AxisHub
@@ -61,7 +61,7 @@ class AxisEntity(Entity):
self.hub = hub
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, hub.unique_id)},
identifiers={(AXIS_DOMAIN, hub.unique_id)},
serial_number=hub.unique_id,
)

View File

@@ -2,8 +2,8 @@
from aiohttp import ClientTimeout
from azure.core.exceptions import (
AzureError,
ClientAuthenticationError,
HttpResponseError,
ResourceNotFoundError,
)
from azure.core.pipeline.transport._aiohttp import (
@@ -39,20 +39,11 @@ async def async_setup_entry(
session = async_create_clientsession(
hass, timeout=ClientTimeout(connect=10, total=12 * 60 * 60)
)
def create_container_client() -> ContainerClient:
"""Create a ContainerClient."""
return ContainerClient(
account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
container_name=entry.data[CONF_CONTAINER_NAME],
credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=session),
)
    # ContainerClient has a blocking call to open in CPython
container_client: ContainerClient = await hass.async_add_executor_job(
create_container_client
container_client = ContainerClient(
account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
container_name=entry.data[CONF_CONTAINER_NAME],
credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=session),
)
try:
@@ -70,7 +61,7 @@ async def async_setup_entry(
translation_key="invalid_auth",
translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
) from err
except AzureError as err:
except HttpResponseError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
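
One side of this hunk wraps the blocking ContainerClient constructor in the executor; the other constructs it inline on the event loop. A generic sketch of the executor-wrapping pattern, with illustrative names rather than the integration's API:

from functools import partial
from typing import Any


async def construct_off_loop(hass: Any, factory: Any, /, *args: Any, **kwargs: Any) -> Any:
    """Run a blocking constructor in the executor instead of the event loop."""
    # async_add_executor_job takes positional args only, hence the partial.
    return await hass.async_add_executor_job(partial(factory, *args, **kwargs))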

View File

@@ -8,7 +8,7 @@ import json
import logging
from typing import Any, Concatenate
from azure.core.exceptions import AzureError, HttpResponseError, ServiceRequestError
from azure.core.exceptions import HttpResponseError
from azure.storage.blob import BlobProperties
from homeassistant.components.backup import (
@@ -80,20 +80,6 @@ def handle_backup_errors[_R, **P](
f"Error during backup operation in {func.__name__}:"
f" Status {err.status_code}, message: {err.message}"
) from err
except ServiceRequestError as err:
raise BackupAgentError(
f"Timeout during backup operation in {func.__name__}"
) from err
except AzureError as err:
_LOGGER.debug(
"Error during backup in %s: %s",
func.__name__,
err,
exc_info=True,
)
raise BackupAgentError(
f"Error during backup operation in {func.__name__}: {err}"
) from err
return wrapper
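
The handle_backup_errors decorator shown here funnels every client exception type into BackupAgentError so callers handle one error surface. A self-contained sketch of the shape (exception types simplified; names illustrative):

from collections.abc import Awaitable, Callable
from functools import wraps
from typing import Any


class AgentError(Exception):
    """Stand-in for the integration's BackupAgentError."""


def map_errors(func: Callable[..., Awaitable[Any]]) -> Callable[..., Awaitable[Any]]:
    """Translate low-level client errors into one agent-facing error type."""

    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        try:
            return await func(*args, **kwargs)
        except TimeoutError as err:
            raise AgentError(f"Timeout during {func.__name__}") from err
        except OSError as err:
            raise AgentError(f"Error during {func.__name__}: {err}") from err

    return wrapper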

View File

@@ -27,25 +27,9 @@ _LOGGER = logging.getLogger(__name__)
class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for azure storage."""
async def get_container_client(
self, account_name: str, container_name: str, storage_account_key: str
) -> ContainerClient:
"""Get the container client.
        ContainerClient has a blocking call to open in CPython
"""
session = async_get_clientsession(self.hass)
def create_container_client() -> ContainerClient:
return ContainerClient(
account_url=f"https://{account_name}.blob.core.windows.net/",
container_name=container_name,
credential=storage_account_key,
transport=AioHttpTransport(session=session),
)
return await self.hass.async_add_executor_job(create_container_client)
def get_account_url(self, account_name: str) -> str:
"""Get the account URL."""
return f"https://{account_name}.blob.core.windows.net/"
async def validate_config(
self, container_client: ContainerClient
@@ -74,10 +58,11 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
self._async_abort_entries_match(
{CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
)
container_client = await self.get_container_client(
account_name=user_input[CONF_ACCOUNT_NAME],
container_client = ContainerClient(
account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
container_name=user_input[CONF_CONTAINER_NAME],
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
errors = await self.validate_config(container_client)
@@ -114,12 +99,12 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
reauth_entry = self._get_reauth_entry()
if user_input is not None:
container_client = await self.get_container_client(
account_name=reauth_entry.data[CONF_ACCOUNT_NAME],
container_client = ContainerClient(
account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
container_name=reauth_entry.data[CONF_CONTAINER_NAME],
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
errors = await self.validate_config(container_client)
if not errors:
return self.async_update_reload_and_abort(
@@ -144,10 +129,13 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
reconfigure_entry = self._get_reconfigure_entry()
if user_input is not None:
container_client = await self.get_container_client(
account_name=reconfigure_entry.data[CONF_ACCOUNT_NAME],
container_client = ContainerClient(
account_url=self.get_account_url(
reconfigure_entry.data[CONF_ACCOUNT_NAME]
),
container_name=user_input[CONF_CONTAINER_NAME],
storage_account_key=user_input[CONF_STORAGE_ACCOUNT_KEY],
credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
)
errors = await self.validate_config(container_client)
if not errors:

View File

@@ -2,9 +2,9 @@
from homeassistant.config_entries import SOURCE_SYSTEM
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, discovery_flow
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType
@@ -23,7 +23,6 @@ from .const import DATA_MANAGER, DOMAIN
from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .http import async_register_http_views
from .manager import (
AddonErrorData,
BackupManager,
BackupManagerError,
BackupPlatformEvent,
@@ -37,7 +36,6 @@ from .manager import (
IdleEvent,
IncorrectPasswordError,
ManagerBackup,
ManagerStateEvent,
NewBackup,
RestoreBackupEvent,
RestoreBackupStage,
@@ -49,7 +47,6 @@ from .util import suggested_filename, suggested_filename_from_name_date
from .websocket import async_register_websocket_handlers
__all__ = [
"AddonErrorData",
"AddonInfo",
"AgentBackup",
"BackupAgent",
@@ -71,17 +68,17 @@ __all__ = [
"IncorrectPasswordError",
"LocalBackupAgent",
"ManagerBackup",
"ManagerStateEvent",
"NewBackup",
"RestoreBackupEvent",
"RestoreBackupStage",
"RestoreBackupState",
"WrittenBackup",
"async_get_manager",
"suggested_filename",
"suggested_filename_from_name_date",
]
PLATFORMS = [Platform.EVENT, Platform.SENSOR]
PLATFORMS = [Platform.SENSOR]
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
@@ -101,13 +98,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
backup_manager = BackupManager(hass, reader_writer)
hass.data[DATA_MANAGER] = backup_manager
try:
await backup_manager.async_setup()
except Exception as err:
hass.data[DATA_BACKUP].manager_ready.set_exception(err)
raise
else:
hass.data[DATA_BACKUP].manager_ready.set_result(None)
await backup_manager.async_setup()
async_register_websocket_handlers(hass, with_hassio)
@@ -162,3 +153,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bo
async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@callback
def async_get_manager(hass: HomeAssistant) -> BackupManager:
"""Get the backup manager instance.
Raises HomeAssistantError if the backup integration is not available.
"""
if DATA_MANAGER not in hass.data:
raise HomeAssistantError("Backup integration is not available")
return hass.data[DATA_MANAGER]
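
A hypothetical caller of the async_get_manager helper above — the try/except mirrors how a dependent integration would guard against the backup integration not being set up:

from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError


def describe_backup_state(hass: HomeAssistant) -> str:
    """Return the manager state, or a fallback if backup is unavailable."""
    try:
        manager = async_get_manager(hass)
    except HomeAssistantError:
        return "backup integration not loaded"
    return str(manager.state)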

View File

@@ -1,38 +0,0 @@
"""Websocket commands for the Backup integration."""
from typing import Any
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import async_subscribe_events
from .const import DATA_MANAGER
from .manager import ManagerStateEvent
@callback
def async_register_websocket_handlers(hass: HomeAssistant) -> None:
"""Register websocket commands."""
websocket_api.async_register_command(hass, handle_subscribe_events)
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to backup events."""
def on_event(event: ManagerStateEvent) -> None:
connection.send_message(websocket_api.event_message(msg["id"], event))
if DATA_MANAGER in hass.data:
manager = hass.data[DATA_MANAGER]
on_event(manager.last_event)
connection.subscriptions[msg["id"]] = async_subscribe_events(hass, on_event)
connection.send_result(msg["id"])
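
For orientation, a client drives this command over the websocket API roughly as follows; the exact event payload shape is illustrative, not a verbatim frontend exchange:

# Client -> server: subscribe (id is chosen by the client).
subscribe = {"id": 5, "type": "backup/subscribe_events"}

# Server -> client: a result frame acknowledging the subscription,
# followed by event frames whenever the manager state changes, e.g.:
event_frame = {
    "id": 5,
    "type": "event",
    "event": {"manager_state": "create_backup", "stage": None, "state": "in_progress"},
}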

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from collections import defaultdict
from dataclasses import dataclass, field, replace
import datetime as dt
from datetime import datetime, timedelta
@@ -88,26 +87,12 @@ class BackupConfigData:
else:
time = None
days = [Day(day) for day in data["schedule"]["days"]]
agents = {}
for agent_id, agent_data in data["agents"].items():
protected = agent_data["protected"]
stored_retention = agent_data["retention"]
agent_retention: AgentRetentionConfig | None
if stored_retention:
agent_retention = AgentRetentionConfig(
copies=stored_retention["copies"],
days=stored_retention["days"],
)
else:
agent_retention = None
agent_config = AgentConfig(
protected=protected,
retention=agent_retention,
)
agents[agent_id] = agent_config
return cls(
agents=agents,
agents={
agent_id: AgentConfig(protected=agent_data["protected"])
for agent_id, agent_data in data["agents"].items()
},
automatic_backups_configured=data["automatic_backups_configured"],
create_backup=CreateBackupConfig(
agent_ids=data["create_backup"]["agent_ids"],
@@ -191,36 +176,12 @@ class BackupConfig:
"""Update config."""
if agents is not UNDEFINED:
for agent_id, agent_config in agents.items():
agent_retention = agent_config.get("retention")
if agent_retention is None:
new_agent_retention = None
else:
new_agent_retention = AgentRetentionConfig(
copies=agent_retention.get("copies"),
days=agent_retention.get("days"),
)
if agent_id not in self.data.agents:
old_agent_retention = None
self.data.agents[agent_id] = AgentConfig(
protected=agent_config.get("protected", True),
retention=new_agent_retention,
)
self.data.agents[agent_id] = AgentConfig(**agent_config)
else:
new_agent_config = self.data.agents[agent_id]
old_agent_retention = new_agent_config.retention
if "protected" in agent_config:
new_agent_config = replace(
new_agent_config, protected=agent_config["protected"]
)
if "retention" in agent_config:
new_agent_config = replace(
new_agent_config, retention=new_agent_retention
)
self.data.agents[agent_id] = new_agent_config
if new_agent_retention != old_agent_retention:
# There's a single retention application method
# for both global and agent retention settings.
self.data.retention.apply(self._manager)
self.data.agents[agent_id] = replace(
self.data.agents[agent_id], **agent_config
)
if automatic_backups_configured is not UNDEFINED:
self.data.automatic_backups_configured = automatic_backups_configured
if create_backup is not UNDEFINED:
@@ -246,24 +207,11 @@ class AgentConfig:
"""Represent the config for an agent."""
protected: bool
"""Agent protected configuration.
If True, the agent backups are password protected.
"""
retention: AgentRetentionConfig | None = None
"""Agent retention configuration.
If None, the global retention configuration is used.
If not None, the global retention configuration is ignored for this agent.
If an agent retention configuration is set and both copies and days are None,
backups will be kept forever for that agent.
"""
def to_dict(self) -> StoredAgentConfig:
"""Convert agent config to a dict."""
return {
"protected": self.protected,
"retention": self.retention.to_dict() if self.retention else None,
}
@@ -271,46 +219,24 @@ class StoredAgentConfig(TypedDict):
"""Represent the stored config for an agent."""
protected: bool
retention: StoredRetentionConfig | None
class AgentParametersDict(TypedDict, total=False):
"""Represent the parameters for an agent."""
protected: bool
retention: RetentionParametersDict | None
@dataclass(kw_only=True)
class BaseRetentionConfig:
"""Represent the base backup retention configuration."""
class RetentionConfig:
"""Represent the backup retention configuration."""
copies: int | None = None
days: int | None = None
def to_dict(self) -> StoredRetentionConfig:
"""Convert backup retention configuration to a dict."""
return StoredRetentionConfig(
copies=self.copies,
days=self.days,
)
@dataclass(kw_only=True)
class RetentionConfig(BaseRetentionConfig):
"""Represent the backup retention configuration."""
def apply(self, manager: BackupManager) -> None:
"""Apply backup retention configuration."""
agents_retention = {
agent_id: agent_config.retention
for agent_id, agent_config in manager.config.data.agents.items()
}
if self.days is not None or any(
agent_retention and agent_retention.days is not None
for agent_retention in agents_retention.values()
):
if self.days is not None:
LOGGER.debug(
"Scheduling next automatic delete of backups older than %s in 1 day",
self.days,
@@ -320,6 +246,13 @@ class RetentionConfig(BaseRetentionConfig):
LOGGER.debug("Unscheduling next automatic delete")
self._unschedule_next(manager)
def to_dict(self) -> StoredRetentionConfig:
"""Convert backup retention configuration to a dict."""
return StoredRetentionConfig(
copies=self.copies,
days=self.days,
)
@callback
def _schedule_next(
self,
@@ -338,81 +271,16 @@ class RetentionConfig(BaseRetentionConfig):
"""Return backups older than days to delete."""
# we need to check here since we await before
# this filter is applied
agents_retention = {
agent_id: agent_config.retention
for agent_id, agent_config in manager.config.data.agents.items()
}
has_agents_retention = any(
agent_retention for agent_retention in agents_retention.values()
)
has_agents_retention_days = any(
agent_retention and agent_retention.days is not None
for agent_retention in agents_retention.values()
)
if (global_days := self.days) is None and not has_agents_retention_days:
# No global retention days and no agent retention days
if self.days is None:
return {}
now = dt_util.utcnow()
if global_days is not None and not has_agents_retention:
# Return early to avoid the longer filtering below.
return {
backup_id: backup
for backup_id, backup in backups.items()
if dt_util.parse_datetime(backup.date, raise_on_error=True)
+ timedelta(days=global_days)
< now
}
# If there are any agent retention settings, we need to check
# the retention settings, for every backup and agent combination.
backups_to_delete = {}
for backup_id, backup in backups.items():
backup_date = dt_util.parse_datetime(
backup.date, raise_on_error=True
)
delete_from_agents = set(backup.agents)
for agent_id in backup.agents:
agent_retention = agents_retention.get(agent_id)
if agent_retention is None:
# This agent does not have a retention setting,
# so the global retention setting should be used.
if global_days is None:
# This agent does not have a retention setting
# and the global retention days setting is None,
# so this backup should not be deleted.
delete_from_agents.discard(agent_id)
continue
days = global_days
elif (agent_days := agent_retention.days) is None:
# This agent has a retention setting
# where days is set to None,
# so the backup should not be deleted.
delete_from_agents.discard(agent_id)
continue
else:
# This agent has a retention setting
# where days is set to a number,
# so that setting should be used.
days = agent_days
if backup_date + timedelta(days=days) >= now:
# This backup is not older than the retention days,
                    # so the backup should not be deleted from this agent.
delete_from_agents.discard(agent_id)
filtered_backup = replace(
backup,
agents={
agent_id: agent_backup_status
for agent_id, agent_backup_status in backup.agents.items()
if agent_id in delete_from_agents
},
)
backups_to_delete[backup_id] = filtered_backup
return backups_to_delete
return {
backup_id: backup
for backup_id, backup in backups.items()
if dt_util.parse_datetime(backup.date, raise_on_error=True)
+ timedelta(days=self.days)
< now
}
await manager.async_delete_filtered_backups(
include_filter=_automatic_backups_filter, delete_filter=_delete_filter
@@ -444,10 +312,6 @@ class RetentionParametersDict(TypedDict, total=False):
days: int | None
class AgentRetentionConfig(BaseRetentionConfig):
"""Represent an agent retention configuration."""
class StoredBackupSchedule(TypedDict):
"""Represent the stored backup schedule configuration."""
@@ -690,87 +554,16 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> N
backups: dict[str, ManagerBackup],
) -> dict[str, ManagerBackup]:
"""Return oldest backups more numerous than copies to delete."""
agents_retention = {
agent_id: agent_config.retention
for agent_id, agent_config in manager.config.data.agents.items()
}
has_agents_retention = any(
agent_retention for agent_retention in agents_retention.values()
)
has_agents_retention_copies = any(
agent_retention and agent_retention.copies is not None
for agent_retention in agents_retention.values()
)
# we need to check here since we await before
# this filter is applied
if (
global_copies := manager.config.data.retention.copies
) is None and not has_agents_retention_copies:
# No global retention copies and no agent retention copies
if manager.config.data.retention.copies is None:
return {}
if global_copies is not None and not has_agents_retention:
# Return early to avoid the longer filtering below.
return dict(
sorted(
backups.items(),
key=lambda backup_item: backup_item[1].date,
)[: max(len(backups) - global_copies, 0)]
)
backups_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(dict)
for backup_id, backup in backups.items():
for agent_id in backup.agents:
backups_by_agent[agent_id][backup_id] = backup
backups_to_delete_by_agent: dict[str, dict[str, ManagerBackup]] = defaultdict(
dict
return dict(
sorted(
backups.items(),
key=lambda backup_item: backup_item[1].date,
)[: max(len(backups) - manager.config.data.retention.copies, 0)]
)
for agent_id, agent_backups in backups_by_agent.items():
agent_retention = agents_retention.get(agent_id)
if agent_retention is None:
# This agent does not have a retention setting,
# so the global retention setting should be used.
if global_copies is None:
# This agent does not have a retention setting
# and the global retention copies setting is None,
# so backups should not be deleted.
continue
# The global retention setting will be used.
copies = global_copies
elif (agent_copies := agent_retention.copies) is None:
# This agent has a retention setting
# where copies is set to None,
# so backups should not be deleted.
continue
else:
# This agent retention setting will be used.
copies = agent_copies
backups_to_delete_by_agent[agent_id] = dict(
sorted(
agent_backups.items(),
key=lambda backup_item: backup_item[1].date,
)[: max(len(agent_backups) - copies, 0)]
)
backup_ids_to_delete: dict[str, set[str]] = defaultdict(set)
for agent_id, to_delete in backups_to_delete_by_agent.items():
for backup_id in to_delete:
backup_ids_to_delete[backup_id].add(agent_id)
backups_to_delete: dict[str, ManagerBackup] = {}
for backup_id, agent_ids in backup_ids_to_delete.items():
backup = backups[backup_id]
# filter the backup to only include the agents that should be deleted
filtered_backup = replace(
backup,
agents={
agent_id: agent_backup_status
for agent_id, agent_backup_status in backup.agents.items()
if agent_id in agent_ids
},
)
backups_to_delete[backup_id] = filtered_backup
return backups_to_delete
await manager.async_delete_filtered_backups(
include_filter=_automatic_backups_filter, delete_filter=_delete_filter
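
The per-agent copies logic in this hunk distills to: group backups by agent, apply each agent's effective limit (agent setting if present, else the global one), and mark the oldest overflow for deletion per agent. A simplified sketch over a reduced data model (plain dicts instead of ManagerBackup):

from collections import defaultdict


def copies_to_delete(
    backups: dict[str, set[str]],       # backup_id -> agent_ids holding it
    dates: dict[str, str],              # backup_id -> ISO 8601 date
    copies_for: dict[str, int | None],  # agent_id -> effective copies limit
) -> dict[str, set[str]]:
    """Return, per backup, the set of agents it should be deleted from."""
    by_agent: dict[str, list[str]] = defaultdict(list)
    for backup_id, agent_ids in backups.items():
        for agent_id in agent_ids:
            by_agent[agent_id].append(backup_id)
    to_delete: dict[str, set[str]] = defaultdict(set)
    for agent_id, ids in by_agent.items():
        limit = copies_for.get(agent_id)
        if limit is None:  # no limit for this agent: keep everything
            continue
        ids.sort(key=lambda backup_id: dates[backup_id])  # oldest first
        for backup_id in ids[: max(len(ids) - limit, 0)]:
            to_delete[backup_id].add(agent_id)
    return dict(to_delete)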

View File

@@ -8,10 +8,6 @@ from datetime import datetime
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import (
async_subscribe_events,
async_subscribe_platform_events,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN, LOGGER
@@ -30,10 +26,8 @@ class BackupCoordinatorData:
"""Class to hold backup data."""
backup_manager_state: BackupManagerState
last_attempted_automatic_backup: datetime | None
last_successful_automatic_backup: datetime | None
next_scheduled_automatic_backup: datetime | None
last_event: ManagerStateEvent | BackupPlatformEvent | None
class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
@@ -56,28 +50,24 @@ class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]):
update_interval=None,
)
self.unsubscribe: list[Callable[[], None]] = [
async_subscribe_events(hass, self._on_event),
async_subscribe_platform_events(hass, self._on_event),
backup_manager.async_subscribe_events(self._on_event),
backup_manager.async_subscribe_platform_events(self._on_event),
]
self.backup_manager = backup_manager
self._last_event: ManagerStateEvent | BackupPlatformEvent | None = None
@callback
def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None:
"""Handle new event."""
LOGGER.debug("Received backup event: %s", event)
self._last_event = event
self.config_entry.async_create_task(self.hass, self.async_refresh())
async def _async_update_data(self) -> BackupCoordinatorData:
"""Update backup manager data."""
return BackupCoordinatorData(
self.backup_manager.state,
self.backup_manager.config.data.last_attempted_automatic_backup,
self.backup_manager.config.data.last_completed_automatic_backup,
self.backup_manager.config.data.schedule.next_automatic_backup,
self._last_event,
)
@callback

View File

@@ -11,7 +11,7 @@ from .const import DOMAIN
from .coordinator import BackupDataUpdateCoordinator
class BackupManagerBaseEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
"""Base entity for backup manager."""
_attr_has_entity_name = True
@@ -19,9 +19,12 @@ class BackupManagerBaseEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
def __init__(
self,
coordinator: BackupDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = entity_description.key
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, "backup_manager")},
manufacturer="Home Assistant",
@@ -31,17 +34,3 @@ class BackupManagerBaseEntity(CoordinatorEntity[BackupDataUpdateCoordinator]):
entry_type=DeviceEntryType.SERVICE,
configuration_url="homeassistant://config/backup",
)
class BackupManagerEntity(BackupManagerBaseEntity):
"""Entity for backup manager."""
def __init__(
self,
coordinator: BackupDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = entity_description.key

View File

@@ -1,59 +0,0 @@
"""Event platform for Home Assistant Backup integration."""
from __future__ import annotations
from typing import Final
from homeassistant.components.event import EventEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .entity import BackupManagerBaseEntity
from .manager import CreateBackupEvent, CreateBackupState
ATTR_BACKUP_STAGE: Final[str] = "backup_stage"
ATTR_FAILED_REASON: Final[str] = "failed_reason"
async def async_setup_entry(
hass: HomeAssistant,
config_entry: BackupConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Event set up for backup config entry."""
coordinator = config_entry.runtime_data
async_add_entities([AutomaticBackupEvent(coordinator)])
class AutomaticBackupEvent(BackupManagerBaseEntity, EventEntity):
"""Representation of an automatic backup event."""
_attr_event_types = [s.value for s in CreateBackupState]
_unrecorded_attributes = frozenset({ATTR_FAILED_REASON, ATTR_BACKUP_STAGE})
coordinator: BackupDataUpdateCoordinator
def __init__(self, coordinator: BackupDataUpdateCoordinator) -> None:
"""Initialize the automatic backup event."""
super().__init__(coordinator)
self._attr_unique_id = "automatic_backup_event"
self._attr_translation_key = "automatic_backup_event"
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if (
not (data := self.coordinator.data)
or (event := data.last_event) is None
or not isinstance(event, CreateBackupEvent)
):
return
self._trigger_event(
event.state,
{
ATTR_BACKUP_STAGE: event.stage,
ATTR_FAILED_REASON: event.reason,
},
)
self.async_write_ha_state()

View File

@@ -22,7 +22,7 @@ from . import util
from .agent import BackupAgent
from .const import DATA_MANAGER
from .manager import BackupManager
from .models import AgentBackup, BackupNotFound
from .models import BackupNotFound
@callback
@@ -85,15 +85,7 @@ class DownloadBackupView(HomeAssistantView):
request, headers, backup_id, agent_id, agent, manager
)
return await self._send_backup_with_password(
hass,
backup,
request,
headers,
backup_id,
agent_id,
password,
agent,
manager,
hass, request, headers, backup_id, agent_id, password, agent, manager
)
except BackupNotFound:
return Response(status=HTTPStatus.NOT_FOUND)
@@ -124,7 +116,6 @@ class DownloadBackupView(HomeAssistantView):
async def _send_backup_with_password(
self,
hass: HomeAssistant,
backup: AgentBackup,
request: Request,
headers: dict[istr, str],
backup_id: str,
@@ -153,8 +144,7 @@ class DownloadBackupView(HomeAssistantView):
stream = util.AsyncIteratorWriter(hass)
worker = threading.Thread(
target=util.decrypt_backup,
args=[backup, reader, stream, password, on_done, 0, []],
target=util.decrypt_backup, args=[reader, stream, password, on_done, 0, []]
)
try:
worker.start()

View File

@@ -1,11 +1,4 @@
{
"entity": {
"event": {
"automatic_backup_event": {
"default": "mdi:database"
}
}
},
"services": {
"create": {
"service": "mdi:cloud-upload"

View File

@@ -36,7 +36,6 @@ from homeassistant.helpers import (
issue_registry as ir,
start,
)
from homeassistant.helpers.backup import DATA_BACKUP
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util
@@ -62,7 +61,6 @@ from .const import (
LOGGER,
)
from .models import (
AddonInfo,
AgentBackup,
BackupError,
BackupManagerError,
@@ -103,27 +101,15 @@ class ManagerBackup(BaseBackup):
"""Backup class."""
agents: dict[str, AgentBackupStatus]
failed_addons: list[AddonInfo]
failed_agent_ids: list[str]
failed_folders: list[Folder]
with_automatic_settings: bool | None
@dataclass(frozen=True, kw_only=True, slots=True)
class AddonErrorData:
"""Addon error class."""
addon: AddonInfo
errors: list[tuple[str, str]]
@dataclass(frozen=True, kw_only=True, slots=True)
class WrittenBackup:
"""Written backup class."""
addon_errors: dict[str, AddonErrorData]
backup: AgentBackup
folder_errors: dict[Folder, list[tuple[str, str]]]
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]]
release_stream: Callable[[], Coroutine[Any, Any, None]]
@@ -372,12 +358,10 @@ class BackupManager:
# Latest backup event and backup event subscribers
self.last_event: ManagerStateEvent = BlockedEvent()
self.last_action_event: ManagerStateEvent | None = None
self._backup_event_subscriptions = hass.data[
DATA_BACKUP
].backup_event_subscriptions
self._backup_platform_event_subscriptions = hass.data[
DATA_BACKUP
].backup_platform_event_subscriptions
self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = []
self._backup_platform_event_subscriptions: list[
Callable[[BackupPlatformEvent], None]
] = []
async def async_setup(self) -> None:
"""Set up the backup manager."""
@@ -649,13 +633,9 @@ class BackupManager:
for agent_backup in result:
if (backup_id := agent_backup.backup_id) not in backups:
if known_backup := self.known_backups.get(backup_id):
failed_addons = known_backup.failed_addons
failed_agent_ids = known_backup.failed_agent_ids
failed_folders = known_backup.failed_folders
else:
failed_addons = []
failed_agent_ids = []
failed_folders = []
with_automatic_settings = self.is_our_automatic_backup(
agent_backup, await instance_id.async_get(self.hass)
)
@@ -666,9 +646,7 @@ class BackupManager:
date=agent_backup.date,
database_included=agent_backup.database_included,
extra_metadata=agent_backup.extra_metadata,
failed_addons=failed_addons,
failed_agent_ids=failed_agent_ids,
failed_folders=failed_folders,
folders=agent_backup.folders,
homeassistant_included=agent_backup.homeassistant_included,
homeassistant_version=agent_backup.homeassistant_version,
@@ -723,13 +701,9 @@ class BackupManager:
continue
if backup is None:
if known_backup := self.known_backups.get(backup_id):
failed_addons = known_backup.failed_addons
failed_agent_ids = known_backup.failed_agent_ids
failed_folders = known_backup.failed_folders
else:
failed_addons = []
failed_agent_ids = []
failed_folders = []
with_automatic_settings = self.is_our_automatic_backup(
result, await instance_id.async_get(self.hass)
)
@@ -740,9 +714,7 @@ class BackupManager:
date=result.date,
database_included=result.database_included,
extra_metadata=result.extra_metadata,
failed_addons=failed_addons,
failed_agent_ids=failed_agent_ids,
failed_folders=failed_folders,
folders=result.folders,
homeassistant_included=result.homeassistant_included,
homeassistant_version=result.homeassistant_version,
@@ -985,7 +957,7 @@ class BackupManager:
password=None,
)
await written_backup.release_stream()
self.known_backups.add(written_backup.backup, agent_errors, {}, {}, [])
self.known_backups.add(written_backup.backup, agent_errors, [])
return written_backup.backup.backup_id
async def async_create_backup(
@@ -1223,11 +1195,7 @@ class BackupManager:
finally:
await written_backup.release_stream()
self.known_backups.add(
written_backup.backup,
agent_errors,
written_backup.addon_errors,
written_backup.folder_errors,
unavailable_agents,
written_backup.backup, agent_errors, unavailable_agents
)
if not agent_errors:
if with_automatic_settings:
@@ -1237,9 +1205,7 @@ class BackupManager:
backup_success = True
if with_automatic_settings:
self._update_issue_after_agent_upload(
written_backup, agent_errors, unavailable_agents
)
self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
# delete old backups more numerous than copies
# try this regardless of agent errors above
await delete_backups_exceeding_configured_count(self)
@@ -1385,10 +1351,34 @@ class BackupManager:
for subscription in self._backup_event_subscriptions:
subscription(event)
def _create_automatic_backup_failed_issue(
self, translation_key: str, translation_placeholders: dict[str, str] | None
) -> None:
"""Create an issue in the issue registry for automatic backup failures."""
@callback
def async_subscribe_events(
self,
on_event: Callable[[ManagerStateEvent], None],
) -> Callable[[], None]:
"""Subscribe events."""
def remove_subscription() -> None:
self._backup_event_subscriptions.remove(on_event)
self._backup_event_subscriptions.append(on_event)
return remove_subscription
@callback
def async_subscribe_platform_events(
self,
on_event: Callable[[BackupPlatformEvent], None],
) -> Callable[[], None]:
"""Subscribe to backup platform events."""
def remove_subscription() -> None:
self._backup_platform_event_subscriptions.remove(on_event)
self._backup_platform_event_subscriptions.append(on_event)
return remove_subscription
def _update_issue_backup_failed(self) -> None:
"""Update issue registry when a backup fails."""
ir.async_create_issue(
self.hass,
DOMAIN,
@@ -1397,73 +1387,37 @@ class BackupManager:
is_persistent=True,
learn_more_url="homeassistant://config/backup",
severity=ir.IssueSeverity.WARNING,
translation_key=translation_key,
translation_placeholders=translation_placeholders,
)
def _update_issue_backup_failed(self) -> None:
"""Update issue registry when a backup fails."""
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_create", None
translation_key="automatic_backup_failed_create",
)
def _update_issue_after_agent_upload(
self,
written_backup: WrittenBackup,
agent_errors: dict[str, Exception],
unavailable_agents: list[str],
self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
) -> None:
"""Update issue registry after a backup is uploaded to agents."""
addon_errors = written_backup.addon_errors
failed_agents = unavailable_agents + [
self.backup_agents[agent_id].name for agent_id in agent_errors
]
folder_errors = written_backup.folder_errors
if not failed_agents and not addon_errors and not folder_errors:
# No issues to report, clear previous error
if not agent_errors and not unavailable_agents:
ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
return
if failed_agents and not (addon_errors or folder_errors):
# No issues with add-ons or folders, but issues with agents
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_upload_agents",
{"failed_agents": ", ".join(failed_agents)},
)
elif addon_errors and not (failed_agents or folder_errors):
# No issues with agents or folders, but issues with add-ons
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_addons",
{
"failed_addons": ", ".join(
val.addon.name or val.addon.slug
for val in addon_errors.values()
ir.async_create_issue(
self.hass,
DOMAIN,
"automatic_backup_failed",
is_fixable=False,
is_persistent=True,
learn_more_url="homeassistant://config/backup",
severity=ir.IssueSeverity.WARNING,
translation_key="automatic_backup_failed_upload_agents",
translation_placeholders={
"failed_agents": ", ".join(
chain(
(
self.backup_agents[agent_id].name
for agent_id in agent_errors
),
unavailable_agents,
)
},
)
elif folder_errors and not (failed_agents or addon_errors):
# No issues with agents or add-ons, but issues with folders
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_folders",
{"failed_folders": ", ".join(folder for folder in folder_errors)},
)
else:
# Issues with agents, add-ons, and/or folders
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_agents_addons_folders",
{
"failed_agents": ", ".join(failed_agents) or "-",
"failed_addons": (
", ".join(
val.addon.name or val.addon.slug
for val in addon_errors.values()
)
or "-"
),
"failed_folders": ", ".join(f for f in folder_errors) or "-",
},
)
)
},
)
async def async_can_decrypt_on_download(
self,
@@ -1529,12 +1483,7 @@ class KnownBackups:
self._backups = {
backup["backup_id"]: KnownBackup(
backup_id=backup["backup_id"],
failed_addons=[
AddonInfo(name=a["name"], slug=a["slug"], version=a["version"])
for a in backup["failed_addons"]
],
failed_agent_ids=backup["failed_agent_ids"],
failed_folders=[Folder(f) for f in backup["failed_folders"]],
)
for backup in stored_backups
}
@@ -1547,16 +1496,12 @@ class KnownBackups:
self,
backup: AgentBackup,
agent_errors: dict[str, Exception],
failed_addons: dict[str, AddonErrorData],
failed_folders: dict[Folder, list[tuple[str, str]]],
unavailable_agents: list[str],
) -> None:
"""Add a backup."""
self._backups[backup.backup_id] = KnownBackup(
backup_id=backup.backup_id,
failed_addons=[val.addon for val in failed_addons.values()],
failed_agent_ids=list(chain(agent_errors, unavailable_agents)),
failed_folders=list(failed_folders),
)
self._manager.store.save()
@@ -1577,38 +1522,21 @@ class KnownBackup:
"""Persistent backup data."""
backup_id: str
failed_addons: list[AddonInfo]
failed_agent_ids: list[str]
failed_folders: list[Folder]
def to_dict(self) -> StoredKnownBackup:
"""Convert known backup to a dict."""
return {
"backup_id": self.backup_id,
"failed_addons": [
{"name": a.name, "slug": a.slug, "version": a.version}
for a in self.failed_addons
],
"failed_agent_ids": self.failed_agent_ids,
"failed_folders": [f.value for f in self.failed_folders],
}
class StoredAddonInfo(TypedDict):
"""Stored add-on info."""
name: str | None
slug: str
version: str | None
class StoredKnownBackup(TypedDict):
"""Stored persistent backup data."""
backup_id: str
failed_addons: list[StoredAddonInfo]
failed_agent_ids: list[str]
failed_folders: list[str]
class CoreBackupReaderWriter(BackupReaderWriter):
@@ -1772,11 +1700,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
raise BackupReaderWriterError(str(err)) from err
return WrittenBackup(
addon_errors={},
backup=backup,
folder_errors={},
open_stream=open_backup,
release_stream=remove_backup,
backup=backup, open_stream=open_backup, release_stream=remove_backup
)
finally:
# Inform integrations the backup is done
@@ -1915,11 +1839,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
await async_add_executor_job(temp_file.unlink, True)
return WrittenBackup(
addon_errors={},
backup=backup,
folder_errors={},
open_stream=open_backup,
release_stream=remove_backup,
backup=backup, open_stream=open_backup, release_stream=remove_backup
)
async def async_restore_backup(
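
The _create_automatic_backup_failed_issue helper in this hunk consolidates several near-identical issue-registry calls; the variants differ only in translation key and placeholders. A sketch of the shared call, using the same kwargs as the hunk (DOMAIN is illustrative):

from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

DOMAIN = "backup"


def report_backup_failure(
    hass: HomeAssistant,
    translation_key: str,
    placeholders: dict[str, str] | None,
) -> None:
    """Create or refresh the persistent automatic-backup-failed issue."""
    ir.async_create_issue(
        hass,
        DOMAIN,
        "automatic_backup_failed",
        is_fixable=False,
        is_persistent=True,
        learn_more_url="homeassistant://config/backup",
        severity=ir.IssueSeverity.WARNING,
        translation_key=translation_key,
        translation_placeholders=placeholders,
    )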

View File

@@ -13,9 +13,9 @@ from homeassistant.exceptions import HomeAssistantError
class AddonInfo:
"""Addon information."""
name: str | None
name: str
slug: str
version: str | None
version: str
class Folder(StrEnum):

View File

@@ -19,9 +19,14 @@ from homeassistant.components.onboarding import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager
from . import BackupManager, Folder, IncorrectPasswordError, http as backup_http
from . import (
BackupManager,
Folder,
IncorrectPasswordError,
async_get_manager,
http as backup_http,
)
if TYPE_CHECKING:
from homeassistant.components.onboarding import OnboardingStoreData
@@ -54,7 +59,7 @@ def with_backup_manager[_ViewT: BaseOnboardingView, **_P](
if self._data["done"]:
raise HTTPUnauthorized
manager = await async_get_backup_manager(request.app[KEY_HASS])
manager = async_get_manager(request.app[KEY_HASS])
return await func(self, manager, request, *args, **kwargs)
return with_backup

View File

@@ -46,12 +46,6 @@ BACKUP_MANAGER_DESCRIPTIONS = (
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda data: data.last_successful_automatic_backup,
),
BackupSensorEntityDescription(
key="last_attempted_automatic_backup",
translation_key="last_attempted_automatic_backup",
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda data: data.last_attempted_automatic_backup,
),
)

View File

@@ -16,7 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 7
STORAGE_VERSION_MINOR = 5
class StoredBackupData(TypedDict):
@@ -72,20 +72,8 @@ class _BackupStore(Store[StoredBackupData]):
data["config"]["automatic_backups_configured"] = (
data["config"]["create_backup"]["password"] is not None
)
if old_minor_version < 6:
# Version 1.6 adds agent retention settings
for agent in data["config"]["agents"]:
data["config"]["agents"][agent]["retention"] = None
if old_minor_version < 7:
# Version 1.7 adds failing addons and folders
for backup in data["backups"]:
backup["failed_addons"] = []
backup["failed_folders"] = []
# Note: We allow reading data with major version 2 in which the unused key
# data["config"]["schedule"]["state"] will be removed. The bump to 2 is
# planned to happen after a 6 month quiet period with no minor version
# changes.
# Note: We allow reading data with major version 2.
# Reject if major version is higher than 2.
if old_major_version > 2:
raise NotImplementedError
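
The migration blocks above follow one pattern: each minor version backfills the keys it introduced so stores written by older versions load cleanly. A standalone sketch of the same pattern over plain dicts, mirroring the versions and keys in the hunk:

def migrate(data: dict, old_major: int, old_minor: int) -> dict:
    """Backfill keys added by newer minor versions of the store schema."""
    if old_major == 1:
        if old_minor < 6:
            # 1.6 added per-agent retention settings.
            for agent in data["config"]["agents"].values():
                agent.setdefault("retention", None)
        if old_minor < 7:
            # 1.7 added failed add-ons and folders per backup.
            for backup in data["backups"]:
                backup.setdefault("failed_addons", [])
                backup.setdefault("failed_folders", [])
    if old_major > 2:
        raise NotImplementedError
    return data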

View File

@@ -11,18 +11,6 @@
"automatic_backup_failed_upload_agents": {
"title": "Automatic backup could not be uploaded to the configured locations",
"description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_addons": {
"title": "Not all add-ons could be included in automatic backup",
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_agents_addons_folders": {
"title": "Automatic backup was created with errors",
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the core and supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_folders": {
"title": "Not all folders could be included in automatic backup",
"description": "Folders {failed_folders} could not be included in automatic backup. Please check the supervisor logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
}
},
"services": {
@@ -36,22 +24,6 @@
}
},
"entity": {
"event": {
"automatic_backup_event": {
"name": "Automatic backup",
"state_attributes": {
"event_type": {
"state": {
"completed": "Completed successfully",
"failed": "Failed",
"in_progress": "In progress"
}
},
"backup_stage": { "name": "Backup stage" },
"failed_reason": { "name": "Failure reason" }
}
}
},
"sensor": {
"backup_manager_state": {
"name": "Backup Manager state",
@@ -65,9 +37,6 @@
"next_scheduled_automatic_backup": {
"name": "Next scheduled automatic backup"
},
"last_attempted_automatic_backup": {
"name": "Last attempted automatic backup"
},
"last_successful_automatic_backup": {
"name": "Last successful automatic backup"
}

View File

@@ -295,26 +295,13 @@ def validate_password_stream(
raise BackupEmpty
def _get_expected_archives(backup: AgentBackup) -> set[str]:
"""Get the expected archives in the backup."""
expected_archives = set()
if backup.homeassistant_included:
expected_archives.add("homeassistant")
for addon in backup.addons:
expected_archives.add(addon.slug)
for folder in backup.folders:
expected_archives.add(folder.value)
return expected_archives
def decrypt_backup(
backup: AgentBackup,
input_stream: IO[bytes],
output_stream: IO[bytes],
password: str | None,
on_done: Callable[[Exception | None], None],
minimum_size: int,
nonces: NonceGenerator,
nonces: list[bytes],
) -> None:
"""Decrypt a backup."""
error: Exception | None = None
@@ -328,13 +315,10 @@ def decrypt_backup(
fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
) as output_tar,
):
_decrypt_backup(backup, input_tar, output_tar, password)
_decrypt_backup(input_tar, output_tar, password)
except (DecryptError, SecureTarError, tarfile.TarError) as err:
LOGGER.warning("Error decrypting backup: %s", err)
error = err
except Exception as err: # noqa: BLE001
LOGGER.exception("Unexpected error when decrypting backup: %s", err)
error = err
else:
# Pad the output stream to the requested minimum size
padding = max(minimum_size - output_stream.tell(), 0)
@@ -349,18 +333,15 @@ def decrypt_backup(
def _decrypt_backup(
backup: AgentBackup,
input_tar: tarfile.TarFile,
output_tar: tarfile.TarFile,
password: str | None,
) -> None:
"""Decrypt a backup."""
expected_archives = _get_expected_archives(backup)
for obj in input_tar:
# We compare with PurePath to avoid issues with different path separators,
# for example when backup.json is added as "./backup.json"
object_path = PurePath(obj.name)
if object_path == PurePath("backup.json"):
if PurePath(obj.name) == PurePath("backup.json"):
# Rewrite the backup.json file to indicate that the backup is decrypted
if not (reader := input_tar.extractfile(obj)):
raise DecryptError
@@ -371,13 +352,7 @@ def _decrypt_backup(
metadata_obj.size = len(updated_metadata_b)
output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
continue
prefix, _, suffix = object_path.name.partition(".")
if suffix not in ("tar", "tgz", "tar.gz"):
LOGGER.debug("Unknown file %s will not be decrypted", obj.name)
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
if prefix not in expected_archives:
LOGGER.debug("Unknown inner tar file %s will not be decrypted", obj.name)
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
istf = SecureTarFile(
@@ -396,13 +371,12 @@ def _decrypt_backup(
def encrypt_backup(
backup: AgentBackup,
input_stream: IO[bytes],
output_stream: IO[bytes],
password: str | None,
on_done: Callable[[Exception | None], None],
minimum_size: int,
nonces: NonceGenerator,
nonces: list[bytes],
) -> None:
"""Encrypt a backup."""
error: Exception | None = None
@@ -416,13 +390,10 @@ def encrypt_backup(
fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
) as output_tar,
):
_encrypt_backup(backup, input_tar, output_tar, password, nonces)
_encrypt_backup(input_tar, output_tar, password, nonces)
except (EncryptError, SecureTarError, tarfile.TarError) as err:
LOGGER.warning("Error encrypting backup: %s", err)
error = err
except Exception as err: # noqa: BLE001
LOGGER.exception("Unexpected error when decrypting backup: %s", err)
error = err
else:
# Pad the output stream to the requested minimum size
padding = max(minimum_size - output_stream.tell(), 0)
@@ -437,20 +408,17 @@ def encrypt_backup(
def _encrypt_backup(
backup: AgentBackup,
input_tar: tarfile.TarFile,
output_tar: tarfile.TarFile,
password: str | None,
nonces: NonceGenerator,
nonces: list[bytes],
) -> None:
"""Encrypt a backup."""
inner_tar_idx = 0
expected_archives = _get_expected_archives(backup)
for obj in input_tar:
# We compare with PurePath to avoid issues with different path separators,
# for example when backup.json is added as "./backup.json"
object_path = PurePath(obj.name)
if object_path == PurePath("backup.json"):
if PurePath(obj.name) == PurePath("backup.json"):
# Rewrite the backup.json file to indicate that the backup is encrypted
if not (reader := input_tar.extractfile(obj)):
raise EncryptError
@@ -461,21 +429,16 @@ def _encrypt_backup(
metadata_obj.size = len(updated_metadata_b)
output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
continue
prefix, _, suffix = object_path.name.partition(".")
if suffix not in ("tar", "tgz", "tar.gz"):
LOGGER.debug("Unknown file %s will not be encrypted", obj.name)
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
if prefix not in expected_archives:
LOGGER.debug("Unknown inner tar file %s will not be encrypted", obj.name)
continue
istf = SecureTarFile(
None, # Not used
gzip=False,
key=password_to_key(password) if password is not None else None,
mode="r",
fileobj=input_tar.extractfile(obj),
nonce=nonces.get(inner_tar_idx),
nonce=nonces[inner_tar_idx],
)
inner_tar_idx += 1
with istf.encrypt(obj) as encrypted:
@@ -493,33 +456,17 @@ class _CipherWorkerStatus:
writer: AsyncIteratorWriter
class NonceGenerator:
"""Generate nonces for encryption."""
def __init__(self) -> None:
"""Initialize the generator."""
self._nonces: dict[int, bytes] = {}
def get(self, index: int) -> bytes:
"""Get a nonce for the given index."""
if index not in self._nonces:
# Generate a new nonce for the given index
self._nonces[index] = os.urandom(16)
return self._nonces[index]
class _CipherBackupStreamer:
"""Encrypt or decrypt a backup."""
_cipher_func: Callable[
[
AgentBackup,
IO[bytes],
IO[bytes],
str | None,
Callable[[Exception | None], None],
int,
NonceGenerator,
list[bytes],
],
None,
]
@@ -537,7 +484,7 @@ class _CipherBackupStreamer:
self._hass = hass
self._open_stream = open_stream
self._password = password
self._nonces = NonceGenerator()
self._nonces: list[bytes] = []
def size(self) -> int:
"""Return the maximum size of the decrypted or encrypted backup."""
@@ -561,15 +508,7 @@ class _CipherBackupStreamer:
writer = AsyncIteratorWriter(self._hass)
worker = threading.Thread(
target=self._cipher_func,
args=[
self._backup,
reader,
writer,
self._password,
on_done,
self.size(),
self._nonces,
],
args=[reader, writer, self._password, on_done, self.size(), self._nonces],
)
worker_status = _CipherWorkerStatus(
done=asyncio.Event(), reader=reader, thread=worker, writer=writer
@@ -599,6 +538,17 @@ class DecryptedBackupStreamer(_CipherBackupStreamer):
class EncryptedBackupStreamer(_CipherBackupStreamer):
"""Encrypt a backup."""
def __init__(
self,
hass: HomeAssistant,
backup: AgentBackup,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
password: str | None,
) -> None:
"""Initialize."""
super().__init__(hass, backup, open_stream, password)
self._nonces = [os.urandom(16) for _ in range(self._num_tar_files())]
_cipher_func = staticmethod(encrypt_backup)
def backup(self) -> AgentBackup:
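The removed NonceGenerator handed out one cached 16-byte nonce per inner-tar index on demand; the code this diff reverts to precomputes the whole list in EncryptedBackupStreamer.__init__ once the number of inner tar files is known. A minimal side-by-side sketch (num_tar_files is illustrative):
import os

# Pattern removed in this diff: lazily create and cache one nonce per index.
class NonceGenerator:
    def __init__(self) -> None:
        self._nonces: dict[int, bytes] = {}

    def get(self, index: int) -> bytes:
        if index not in self._nonces:
            self._nonces[index] = os.urandom(16)
        return self._nonces[index]

# Pattern reverted to: precompute every nonce up front.
num_tar_files = 3  # illustrative; the streamer derives this from the backup
nonces: list[bytes] = [os.urandom(16) for _ in range(num_tar_files)]

gen = NonceGenerator()
assert gen.get(0) == gen.get(0)  # stable across repeated calls
assert len(nonces) == num_tar_files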

View File

@@ -10,7 +10,11 @@ from homeassistant.helpers import config_validation as cv
from .config import Day, ScheduleRecurrence
from .const import DATA_MANAGER, LOGGER
from .manager import DecryptOnDowloadNotSupported, IncorrectPasswordError
from .manager import (
DecryptOnDowloadNotSupported,
IncorrectPasswordError,
ManagerStateEvent,
)
from .models import BackupNotFound, Folder
@@ -30,6 +34,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
websocket_api.async_register_command(hass, handle_delete)
websocket_api.async_register_command(hass, handle_restore)
websocket_api.async_register_command(hass, handle_subscribe_events)
websocket_api.async_register_command(hass, handle_config_info)
websocket_api.async_register_command(hass, handle_config_update)
@@ -346,28 +351,7 @@ async def handle_config_info(
@websocket_api.websocket_command(
{
vol.Required("type"): "backup/config/update",
vol.Optional("agents"): vol.Schema(
{
str: {
vol.Optional("protected"): bool,
vol.Optional("retention"): vol.Any(
vol.Schema(
{
# Note: We can't use cv.positive_int because it allows 0 even
# though 0 is not positive.
vol.Optional("copies"): vol.Any(
vol.All(int, vol.Range(min=1)), None
),
vol.Optional("days"): vol.Any(
vol.All(int, vol.Range(min=1)), None
),
},
),
None,
),
}
}
),
vol.Optional("agents"): vol.Schema({str: {"protected": bool}}),
vol.Optional("automatic_backups_configured"): bool,
vol.Optional("create_backup"): vol.Schema(
{
@@ -417,3 +401,22 @@ def handle_config_update(
changes.pop("type")
manager.config.update(**changes)
connection.send_result(msg["id"])
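The retention block removed from the schema above rejects zero explicitly rather than using cv.positive_int (which accepts 0). A minimal standalone voluptuous sketch of that validator:
import voluptuous as vol

retention = vol.Schema(
    {
        # Range(min=1) rejects 0; None stays allowed to clear the setting.
        vol.Optional("copies"): vol.Any(vol.All(int, vol.Range(min=1)), None),
        vol.Optional("days"): vol.Any(vol.All(int, vol.Range(min=1)), None),
    }
)

retention({"copies": 3, "days": None})  # accepted
try:
    retention({"copies": 0})
except vol.Invalid:
    pass  # rejected, as the schema comment intends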
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to backup events."""
def on_event(event: ManagerStateEvent) -> None:
connection.send_message(websocket_api.event_message(msg["id"], event))
manager = hass.data[DATA_MANAGER]
on_event(manager.last_event)
connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
connection.send_result(msg["id"])
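A minimal client-side sketch of the new subscription command. Assumptions: a local Home Assistant at ws://localhost:8123, the third-party websockets package, and a long-lived access token; the auth handshake is the standard websocket API flow, and only the "backup/subscribe_events" type comes from this diff:
import asyncio
import json

import websockets  # pip install websockets

async def watch_backup_events(token: str) -> None:
    async with websockets.connect("ws://localhost:8123/api/websocket") as ws:
        await ws.recv()  # server opens with auth_required
        await ws.send(json.dumps({"type": "auth", "access_token": token}))
        await ws.recv()  # auth_ok
        await ws.send(json.dumps({"id": 1, "type": "backup/subscribe_events"}))
        # The handler replays manager.last_event immediately, then streams updates.
        while True:
            print(json.loads(await ws.recv()))

# asyncio.run(watch_backup_events("YOUR_LONG_LIVED_TOKEN"))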

View File

@@ -21,6 +21,7 @@ from .entity import BleBoxEntity
SCAN_INTERVAL = timedelta(seconds=5)
BLEBOX_TO_HVACMODE = {
None: None,
0: HVACMode.OFF,
1: HVACMode.HEAT,
2: HVACMode.COOL,
@@ -58,14 +59,12 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
_attr_temperature_unit = UnitOfTemperature.CELSIUS
@property
def hvac_modes(self) -> list[HVACMode]:
def hvac_modes(self):
"""Return list of supported HVAC modes."""
if self._feature.mode is None:
return [HVACMode.OFF]
return [HVACMode.OFF, BLEBOX_TO_HVACMODE[self._feature.mode]]
@property
def hvac_mode(self) -> HVACMode | None:
def hvac_mode(self):
"""Return the desired HVAC mode."""
if self._feature.is_on is None:
return None
@@ -76,7 +75,7 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
return HVACMode.HEAT if self._feature.is_on else HVACMode.OFF
@property
def hvac_action(self) -> HVACAction | None:
def hvac_action(self):
"""Return the actual current HVAC action."""
if self._feature.hvac_action is not None:
if not self._feature.is_on:
@@ -89,22 +88,22 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
return HVACAction.HEATING if self._feature.is_heating else HVACAction.IDLE
@property
def max_temp(self) -> float:
def max_temp(self):
"""Return the maximum temperature supported."""
return self._feature.max_temp
@property
def min_temp(self) -> float:
def min_temp(self):
"""Return the maximum temperature supported."""
return self._feature.min_temp
@property
def current_temperature(self) -> float | None:
def current_temperature(self):
"""Return the current temperature."""
return self._feature.current
@property
def target_temperature(self) -> float | None:
def target_temperature(self):
"""Return the desired thermostat temperature."""
return self._feature.desired

View File

@@ -84,7 +84,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
return color_util.color_temperature_mired_to_kelvin(self._feature.color_temp)
@property
def color_mode(self) -> ColorMode:
def color_mode(self):
"""Return the color mode.
Set values to _attr_ attributes if needed.
@@ -92,7 +92,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
return COLOR_MODE_MAP.get(self._feature.color_mode, ColorMode.ONOFF)
@property
def supported_color_modes(self) -> set[ColorMode]:
def supported_color_modes(self):
"""Return supported color modes."""
return {self.color_mode}
@@ -107,7 +107,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
return self._feature.effect
@property
def rgb_color(self) -> tuple[int, int, int] | None:
def rgb_color(self):
"""Return value for rgb."""
if (rgb_hex := self._feature.rgb_hex) is None:
return None
@@ -118,14 +118,14 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
)
@property
def rgbw_color(self) -> tuple[int, int, int, int] | None:
def rgbw_color(self):
"""Return the hue and saturation."""
if (rgbw_hex := self._feature.rgbw_hex) is None:
return None
return tuple(blebox_uniapi.light.Light.rgb_hex_to_rgb_list(rgbw_hex)[0:4])
@property
def rgbww_color(self) -> tuple[int, int, int, int, int] | None:
def rgbww_color(self):
"""Return value for rgbww."""
if (rgbww_hex := self._feature.rgbww_hex) is None:
return None
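These color properties all lean on blebox_uniapi's hex-to-channel helper; a hedged reimplementation of what that conversion plausibly does (the body is an assumption; only the helper's name appears in the diff):
def rgb_hex_to_rgb_list(rgb_hex: str) -> list[int]:
    # One 0-255 integer per two hex digits, e.g. "ffa00040" -> [255, 160, 0, 64];
    # rgbw_color then keeps the first four channels.
    return [int(rgb_hex[i : i + 2], 16) for i in range(0, len(rgb_hex), 2)]

assert tuple(rgb_hex_to_rgb_list("ffa00040")[0:4]) == (255, 160, 0, 64)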

View File

@@ -2,7 +2,7 @@
"config": {
"step": {
"user": {
"title": "Sign in with Blink account",
"title": "Sign-in with Blink account",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
@@ -30,7 +30,7 @@
"step": {
"simple_options": {
"data": {
"scan_interval": "Scan interval (seconds)"
"scan_interval": "Scan Interval (seconds)"
},
"title": "Blink options",
"description": "Configure Blink integration"
@@ -93,7 +93,7 @@
},
"config_entry_id": {
"name": "Integration ID",
"description": "The Blink integration ID."
"description": "The Blink Integration ID."
}
}
}

View File

@@ -24,7 +24,7 @@ from .const import DOMAIN, EVSE_ID, LOGGER, MODEL_TYPE
type BlueCurrentConfigEntry = ConfigEntry[Connector]
PLATFORMS = [Platform.BUTTON, Platform.SENSOR]
PLATFORMS = [Platform.SENSOR]
CHARGE_POINTS = "CHARGE_POINTS"
DATA = "data"
DELAY = 5

View File

@@ -1,89 +0,0 @@
"""Support for Blue Current buttons."""
from __future__ import annotations
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any
from bluecurrent_api.client import Client
from homeassistant.components.button import (
ButtonDeviceClass,
ButtonEntity,
ButtonEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BlueCurrentConfigEntry, Connector
from .entity import ChargepointEntity
@dataclass(kw_only=True, frozen=True)
class ChargePointButtonEntityDescription(ButtonEntityDescription):
"""Describes a Blue Current button entity."""
function: Callable[[Client, str], Coroutine[Any, Any, None]]
CHARGE_POINT_BUTTONS = (
ChargePointButtonEntityDescription(
key="reset",
translation_key="reset",
function=lambda client, evse_id: client.reset(evse_id),
device_class=ButtonDeviceClass.RESTART,
),
ChargePointButtonEntityDescription(
key="reboot",
translation_key="reboot",
function=lambda client, evse_id: client.reboot(evse_id),
device_class=ButtonDeviceClass.RESTART,
),
ChargePointButtonEntityDescription(
key="stop_charge_session",
translation_key="stop_charge_session",
function=lambda client, evse_id: client.stop_session(evse_id),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: BlueCurrentConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Blue Current buttons."""
connector: Connector = entry.runtime_data
async_add_entities(
ChargePointButton(
connector,
button,
evse_id,
)
for evse_id in connector.charge_points
for button in CHARGE_POINT_BUTTONS
)
class ChargePointButton(ChargepointEntity, ButtonEntity):
"""Define a charge point button."""
has_value = True
entity_description: ChargePointButtonEntityDescription
def __init__(
self,
connector: Connector,
description: ChargePointButtonEntityDescription,
evse_id: str,
) -> None:
"""Initialize the button."""
super().__init__(connector, evse_id)
self.entity_description = description
self._attr_unique_id = f"{description.key}_{evse_id}"
async def async_press(self) -> None:
"""Handle the button press."""
await self.entity_description.function(self.connector.client, self.evse_id)

View File

@@ -1,5 +1,7 @@
"""Entity representing a Blue Current charge point."""
from abc import abstractmethod
from homeassistant.const import ATTR_NAME
from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceInfo
@@ -15,12 +17,12 @@ class BlueCurrentEntity(Entity):
_attr_has_entity_name = True
_attr_should_poll = False
has_value = False
def __init__(self, connector: Connector, signal: str) -> None:
"""Initialize the entity."""
self.connector = connector
self.signal = signal
self.has_value = False
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
@@ -41,6 +43,7 @@ class BlueCurrentEntity(Entity):
return self.connector.connected and self.has_value
@callback
@abstractmethod
def update_from_latest_data(self) -> None:
"""Update the entity from the latest data."""

View File

@@ -19,17 +19,6 @@
"current_left": {
"default": "mdi:gauge"
}
},
"button": {
"reset": {
"default": "mdi:restart"
},
"reboot": {
"default": "mdi:restart-alert"
},
"stop_charge_session": {
"default": "mdi:stop"
}
}
}
}

View File

@@ -1,7 +1,7 @@
{
"domain": "blue_current",
"name": "Blue Current",
"codeowners": ["@gleeuwen", "@NickKoepr", "@jtodorova23"],
"codeowners": ["@Floris272", "@gleeuwen"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"iot_class": "cloud_push",

Some files were not shown because too many files have changed in this diff.