Mirror of https://github.com/home-assistant/core.git, synced 2025-08-10 16:15:08 +02:00.

Commit: Merge branch 'dev' into max-recursion

@@ -111,6 +111,7 @@ components: &components
- homeassistant/components/tag/**
- homeassistant/components/template/**
- homeassistant/components/timer/**
- homeassistant/components/trace/**
- homeassistant/components/usb/**
- homeassistant/components/webhook/**
- homeassistant/components/websocket_api/**

.github/workflows/builder.yml (vendored, 18 changed lines)

@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
with:
fetch-depth: 0

@@ -90,7 +90,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'

@@ -242,7 +242,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Set build additional args
run: |

@@ -279,7 +279,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -321,7 +321,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Install Cosign
uses: sigstore/cosign-installer@v3.6.0

@@ -451,7 +451,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.2.0

@@ -499,7 +499,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0

- name: Login to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0

@@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build Docker image
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile

@@ -522,7 +522,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile

.github/workflows/ci.yaml (vendored, 60 changed lines)

@@ -39,8 +39,8 @@ on:
env:
CACHE_VERSION: 10
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 8
HA_SHORT_VERSION: "2024.10"
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2024.11"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12']"
# 10.3 is the oldest supported version

@@ -93,7 +93,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |

@@ -231,7 +231,7 @@ jobs:
- info
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.2.0

@@ -277,7 +277,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.2.0
id: python

@@ -317,7 +317,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.2.0
id: python

@@ -357,7 +357,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.2.0
id: python

@@ -447,7 +447,7 @@ jobs:
- script/hassfest/docker/Dockerfile
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"

@@ -466,7 +466,7 @@ jobs:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.2.0

@@ -550,7 +550,7 @@ jobs:
sudo apt-get -y install \
libturbojpeg
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.2.0

@@ -583,7 +583,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.2.0

@@ -617,7 +617,7 @@ jobs:
&& needs.info.outputs.requirements == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.2.0

@@ -645,7 +645,7 @@ jobs:
- name: Process licenses
run: |
. venv/bin/activate
python -m script.licenses
python -m script.licenses licenses.json

pylint:
name: Check pylint

@@ -660,7 +660,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.2.0

@@ -707,7 +707,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.2.0

@@ -752,7 +752,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.2.0

@@ -815,7 +815,11 @@ jobs:
needs:
- info
- base
name: Split tests for full run
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
name: Split tests for full run Python ${{ matrix.python-version }}
steps:
- name: Install additional OS dependencies
run: |

@@ -827,12 +831,12 @@ jobs:
libturbojpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv

@@ -891,7 +895,7 @@ jobs:
libturbojpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.2.0

@@ -1011,7 +1015,7 @@ jobs:
libturbojpeg \
libmariadb-dev-compat
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.2.0

@@ -1137,7 +1141,7 @@ jobs:
libturbojpeg \
postgresql-server-dev-14
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.2.0

@@ -1232,14 +1236,14 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.8
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v4.5.0
uses: codecov/codecov-action@v4.6.0
with:
fail_ci_if_error: true
flags: full-suite

@@ -1283,7 +1287,7 @@ jobs:
libturbojpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.2.0

@@ -1370,14 +1374,14 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.8
with:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v4.5.0
uses: codecov/codecov-action@v4.6.0
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/codeql.yml (vendored, 6 changed lines)

@@ -21,14 +21,14 @@ jobs:

steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.26.7
uses: github/codeql-action/init@v3.26.11
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.26.7
uses: github/codeql-action/analyze@v3.26.11
with:
category: "/language:python"

.github/workflows/translations.yml (vendored, 2 changed lines)

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.2.0

.github/workflows/wheels.yml (vendored, 16 changed lines)

@@ -32,7 +32,7 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python

@@ -119,7 +119,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Download env_file
uses: actions/download-artifact@v4.1.8

@@ -163,7 +163,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.7
uses: actions/checkout@v4.2.0

- name: Download env_file
uses: actions/download-artifact@v4.1.8

@@ -205,11 +205,9 @@ jobs:
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.
# Build these first.
# grpcio: https://github.com/grpc/grpc/issues/33918
# pydantic: https://github.com/pydantic/pydantic/issues/7689

touch requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'grpcio==' >> requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt

- name: Build wheels (old cython)

@@ -221,7 +219,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_old-cython.txt"

@@ -236,7 +234,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"

@@ -250,7 +248,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"

@@ -264,7 +262,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.5
rev: v0.6.8
hooks:
- id: ruff
args:

@@ -83,10 +83,10 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements\.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
pass_filenames: false
language: script
types: [text]

CODEOWNERS (22 changed lines)

@@ -544,6 +544,8 @@ build.json @home-assistant/supervisor
/tests/components/github/ @timmo001 @ludeeus
/homeassistant/components/glances/ @engrbm87
/tests/components/glances/ @engrbm87
/homeassistant/components/go2rtc/ @home-assistant/core
/tests/components/go2rtc/ @home-assistant/core
/homeassistant/components/goalzero/ @tkdrob
/tests/components/goalzero/ @tkdrob
/homeassistant/components/gogogate2/ @vangorra

@@ -817,8 +819,6 @@ build.json @home-assistant/supervisor
/tests/components/lektrico/ @lektrico
/homeassistant/components/lg_netcast/ @Drafteed @splinter98
/tests/components/lg_netcast/ @Drafteed @splinter98
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
/tests/components/lg_thinq/ @LG-ThinQ-Integration
/homeassistant/components/lidarr/ @tkdrob
/tests/components/lidarr/ @tkdrob
/homeassistant/components/lifx/ @Djelibeybi

@@ -1026,6 +1026,8 @@ build.json @home-assistant/supervisor
/tests/components/nut/ @bdraco @ollo69 @pestevez
/homeassistant/components/nws/ @MatthewFlamm @kamiyo
/tests/components/nws/ @MatthewFlamm @kamiyo
/homeassistant/components/nyt_games/ @joostlek
/tests/components/nyt_games/ @joostlek
/homeassistant/components/nzbget/ @chriscla
/tests/components/nzbget/ @chriscla
/homeassistant/components/obihai/ @dshokouhi @ejpenney

@@ -1104,8 +1106,6 @@ build.json @home-assistant/supervisor
/tests/components/pi_hole/ @shenxn
/homeassistant/components/picnic/ @corneyl
/tests/components/picnic/ @corneyl
/homeassistant/components/pilight/ @trekky12
/tests/components/pilight/ @trekky12
/homeassistant/components/ping/ @jpbede
/tests/components/ping/ @jpbede
/homeassistant/components/plaato/ @JohNan

@@ -1135,8 +1135,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/proximity/ @mib1185
/tests/components/proximity/ @mib1185
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno
/homeassistant/components/prusalink/ @balloob @Skaronator
/tests/components/prusalink/ @balloob @Skaronator
/homeassistant/components/prusalink/ @balloob
/tests/components/prusalink/ @balloob
/homeassistant/components/ps4/ @ktnrg45
/tests/components/ps4/ @ktnrg45
/homeassistant/components/pure_energie/ @klaasnicolaas

@@ -1384,15 +1384,13 @@ build.json @home-assistant/supervisor
/tests/components/spaceapi/ @fabaff
/homeassistant/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/tests/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/homeassistant/components/spider/ @peternijssen
/tests/components/spider/ @peternijssen
/homeassistant/components/splunk/ @Bre77
/homeassistant/components/spotify/ @frenck @joostlek
/tests/components/spotify/ @frenck @joostlek
/homeassistant/components/sql/ @gjohansson-ST @dougiteixeira
/tests/components/sql/ @gjohansson-ST @dougiteixeira
/homeassistant/components/squeezebox/ @rajlaud
/tests/components/squeezebox/ @rajlaud
/homeassistant/components/squeezebox/ @rajlaud @pssc @peteS-UK
/tests/components/squeezebox/ @rajlaud @pssc @peteS-UK
/homeassistant/components/srp_energy/ @briglx
/tests/components/srp_energy/ @briglx
/homeassistant/components/starline/ @anonym-tsk

@@ -1438,8 +1436,8 @@ build.json @home-assistant/supervisor
/tests/components/switchbot/ @danielhiversen @RenierM26 @murtas @Eloston @dsypniewski
/homeassistant/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/tests/components/switchbot_cloud/ @SeraphicRav @laurence-presland @Gigatrappeur
/homeassistant/components/switcher_kis/ @thecode
/tests/components/switcher_kis/ @thecode
/homeassistant/components/switcher_kis/ @thecode @YogevBokobza
/tests/components/switcher_kis/ @thecode @YogevBokobza
/homeassistant/components/switchmate/ @danielhiversen @qiz-li
/homeassistant/components/syncthing/ @zhulik
/tests/components/syncthing/ @zhulik

Dockerfile (29 changed lines)

@@ -12,7 +12,7 @@ ENV \
ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.4.9
RUN pip3 install uv==0.4.17

WORKDIR /usr/src

@@ -29,15 +29,9 @@ RUN \
if ls homeassistant/home_assistant_*.whl 1> /dev/null 2>&1; then \
uv pip install homeassistant/home_assistant_*.whl; \
fi \
&& if [ "${BUILD_ARCH}" = "i386" ]; then \
linux32 uv pip install \
--no-build \
-r homeassistant/requirements_all.txt; \
else \
uv pip install \
--no-build \
-r homeassistant/requirements_all.txt; \
fi
&& uv pip install \
--no-build \
-r homeassistant/requirements_all.txt

## Setup Home Assistant Core
COPY . homeassistant/

@@ -50,4 +44,19 @@ RUN \
# Home Assistant S6-Overlay
COPY rootfs /

# Needs to be redefined inside the FROM statement to be set for RUN commands
ARG BUILD_ARCH
# Get go2rtc binary
RUN \
case "${BUILD_ARCH}" in \
"aarch64") go2rtc_suffix='arm64' ;; \
"armhf") go2rtc_suffix='armv6' ;; \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.4/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version

WORKDIR /config

@@ -7,8 +7,6 @@ Check out `home-assistant.io <https://home-assistant.io>`__ for `a
demo <https://demo.home-assistant.io>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
`tutorials <https://home-assistant.io/getting-started/automation/>`__ and `documentation <https://home-assistant.io/docs/>`__.

This is a project of the `Open Home Foundation <https://www.openhomefoundation.org/>`__.

|screenshot-states|

Featured integrations

@@ -22,9 +20,14 @@ components <https://developers.home-assistant.io/docs/creating_component_index/>
If you run into issues while using Home Assistant or during development
of a component, check the `Home Assistant help section <https://home-assistant.io/help/>`__ of our website for further help and information.

|ohf-logo|

.. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
:target: https://www.home-assistant.io/join-chat/
.. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-states.png
:target: https://demo.home-assistant.io
.. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-integrations.png
:target: https://home-assistant.io/integrations/
.. |ohf-logo| image:: https://www.openhomefoundation.org/badges/home-assistant.png
:alt: Home Assistant - A project from the Open Home Foundation
:target: https://www.openhomefoundation.org/

@@ -127,7 +127,11 @@ class AuthManagerFlowManager(
flow: data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]],
result: AuthFlowResult,
) -> AuthFlowResult:
"""Return a user as result of login flow."""
"""Return a user as result of login flow.

This method is called when a flow step returns FlowResultType.ABORT or
FlowResultType.CREATE_ENTRY.
"""
flow = cast(LoginFlow, flow)

if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from functools import cached_property
|
||||
import secrets
|
||||
from typing import Any, NamedTuple
|
||||
import uuid
|
||||
@@ -11,6 +10,7 @@ import uuid
|
||||
import attr
|
||||
from attr import Attribute
|
||||
from attr.setters import validate
|
||||
from propcache import cached_property
|
||||
|
||||
from homeassistant.const import __version__
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
|
@@ -9,6 +9,7 @@ import it.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
# pylint: disable-next=hass-deprecated-import
|
||||
from functools import cached_property as _cached_property, partial
|
||||
|
||||
from homeassistant.helpers.deprecation import (
|
||||
|
5
homeassistant/brands/aqara.json
Normal file
5
homeassistant/brands/aqara.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "aqara",
|
||||
"name": "Aqara",
|
||||
"iot_standards": ["matter", "zigbee"]
|
||||
}
|

@@ -5,7 +5,6 @@
"google_assistant",
"google_assistant_sdk",
"google_cloud",
"google_domains",
"google_generative_ai_conversation",
"google_mail",
"google_maps",

@@ -1,5 +1,5 @@
{
"domain": "lg",
"name": "LG",
"integrations": ["lg_netcast", "lg_thinq", "lg_soundbar", "webostv"]
"integrations": ["lg_netcast", "lg_soundbar", "webostv"]
}

@@ -4,8 +4,10 @@ from __future__ import annotations

from dataclasses import dataclass, field
from functools import partial
from pathlib import Path

from jaraco.abode.client import Client as Abode
import jaraco.abode.config
from jaraco.abode.exceptions import (
AuthenticationException as AbodeAuthenticationException,
Exception as AbodeException,

@@ -93,6 +95,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
password = entry.data[CONF_PASSWORD]
polling = entry.data[CONF_POLLING]

# Configure abode library to use config directory for storing data
jaraco.abode.config.paths.override(user_data=Path(hass.config.path("Abode")))

# For previous config entries where unique_id is None
if entry.unique_id is None:
hass.config_entries.async_update_entry(

@@ -102,15 +102,7 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
existing_entry = await self.async_set_unique_id(self._username)

if existing_entry:
self.hass.config_entries.async_update_entry(
existing_entry, data=config_data
)
# Reload the Abode config entry otherwise devices will remain unavailable
self.hass.async_create_task(
self.hass.config_entries.async_reload(existing_entry.entry_id)
)

return self.async_abort(reason="reauth_successful")
return self.async_update_reload_and_abort(existing_entry, data=config_data)

return self.async_create_entry(
title=cast(str, self._username), data=config_data

@@ -9,5 +9,5 @@
},
"iot_class": "cloud_push",
"loggers": ["jaraco.abode", "lomond"],
"requirements": ["jaraco.abode==6.2.0"]
"requirements": ["jaraco.abode==6.2.1"]
}

@@ -13,11 +13,13 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType, StateType
from homeassistant.util.hass_dict import HassKey

from .const import DOMAIN

_LOGGER: Final = logging.getLogger(__name__)

DATA_COMPONENT: HassKey[EntityComponent[AirQualityEntity]] = HassKey(DOMAIN)
ENTITY_ID_FORMAT: Final = DOMAIN + ".{}"
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE

@@ -54,7 +56,7 @@ PROP_TO_ATTR: Final[dict[str, str]] = {

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the air quality component."""
component = hass.data[DOMAIN] = EntityComponent[AirQualityEntity](
component = hass.data[DATA_COMPONENT] = EntityComponent[AirQualityEntity](
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
await component.async_setup(config)

@@ -63,14 +65,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
component: EntityComponent[AirQualityEntity] = hass.data[DOMAIN]
return await component.async_setup_entry(entry)
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
component: EntityComponent[AirQualityEntity] = hass.data[DOMAIN]
return await component.async_unload_entry(entry)
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)


class AirQualityEntity(Entity):
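
Note: this hunk (and the alarm_control_panel and analytics hunks further down) replaces string-keyed hass.data[DOMAIN] lookups, which needed a manual type annotation at every call site, with a module-level typed HassKey. A minimal sketch of the pattern, assuming a Home Assistant environment; names mirror the air_quality hunk:

    # Minimal sketch of the HassKey pattern (assumes Home Assistant is installed).
    from homeassistant.core import HomeAssistant
    from homeassistant.helpers.entity import Entity
    from homeassistant.helpers.entity_component import EntityComponent
    from homeassistant.util.hass_dict import HassKey

    DOMAIN = "air_quality"

    # The key carries the stored value's type, so type checkers know what
    # hass.data[DATA_COMPONENT] returns without a per-call-site annotation.
    DATA_COMPONENT: HassKey[EntityComponent[Entity]] = HassKey(DOMAIN)


    def store(hass: HomeAssistant, component: EntityComponent[Entity]) -> None:
        hass.data[DATA_COMPONENT] = component


    def lookup(hass: HomeAssistant) -> EntityComponent[Entity]:
        # Previously: component: EntityComponent[Entity] = hass.data[DOMAIN]
        return hass.data[DATA_COMPONENT]
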

@@ -9,9 +9,10 @@ from typing import TYPE_CHECKING
from airgradient import AirGradientClient, AirGradientError, Config, Measures

from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import LOGGER
from .const import DOMAIN, LOGGER

if TYPE_CHECKING:
from . import AirGradientConfigEntry

@@ -29,6 +30,7 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
"""Class to manage fetching AirGradient data."""

config_entry: AirGradientConfigEntry
_current_version: str

def __init__(self, hass: HomeAssistant, client: AirGradientClient) -> None:
"""Initialize coordinator."""

@@ -42,11 +44,27 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
assert self.config_entry.unique_id
self.serial_number = self.config_entry.unique_id

async def _async_setup(self) -> None:
"""Set up the coordinator."""
self._current_version = (
await self.client.get_current_measures()
).firmware_version

async def _async_update_data(self) -> AirGradientData:
try:
measures = await self.client.get_current_measures()
config = await self.client.get_config()
except AirGradientError as error:
raise UpdateFailed(error) from error
else:
return AirGradientData(measures, config)
if measures.firmware_version != self._current_version:
device_registry = dr.async_get(self.hass)
device_entry = device_registry.async_get_device(
identifiers={(DOMAIN, self.serial_number)}
)
assert device_entry
device_registry.async_update_device(
device_entry.id,
sw_version=measures.firmware_version,
)
self._current_version = measures.firmware_version
return AirGradientData(measures, config)

homeassistant/components/airgradient/diagnostics.py (new file, 18 lines)

@@ -0,0 +1,18 @@
"""Diagnostics support for Airgradient."""

from __future__ import annotations

from dataclasses import asdict
from typing import Any

from homeassistant.core import HomeAssistant

from . import AirGradientConfigEntry


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: AirGradientConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""

return asdict(entry.runtime_data.data)

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/airgradient",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["airgradient==0.8.0"],
"requirements": ["airgradient==0.9.0"],
"zeroconf": ["_airgradient._tcp.local."]
}

@@ -1,7 +1,8 @@
"""Airgradient Update platform."""

from datetime import timedelta
from functools import cached_property

from propcache import cached_property

from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
from homeassistant.core import HomeAssistant

@@ -15,7 +15,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN # noqa: F401
from .coordinator import AirNowDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

@@ -14,10 +14,32 @@ ATTR_API_POLLUTANT = "Pollutant"
ATTR_API_REPORT_DATE = "DateObserved"
ATTR_API_REPORT_HOUR = "HourObserved"
ATTR_API_REPORT_TZ = "LocalTimeZone"
ATTR_API_REPORT_TZINFO = "LocalTimeZoneInfo"
ATTR_API_STATE = "StateCode"
ATTR_API_STATION = "ReportingArea"
ATTR_API_STATION_LATITUDE = "Latitude"
ATTR_API_STATION_LONGITUDE = "Longitude"
DEFAULT_NAME = "AirNow"
DOMAIN = "airnow"

SECONDS_PER_HOUR = 3600

# AirNow seems to only use standard time zones,
# but we include daylight savings for completeness/futureproofing.
US_TZ_OFFSETS = {
"HST": -10 * SECONDS_PER_HOUR,
"HDT": -9 * SECONDS_PER_HOUR,
# AirNow returns AKT instead of AKST or AKDT, use standard
"AKT": -9 * SECONDS_PER_HOUR,
"AKST": -9 * SECONDS_PER_HOUR,
"AKDT": -8 * SECONDS_PER_HOUR,
"PST": -8 * SECONDS_PER_HOUR,
"PDT": -7 * SECONDS_PER_HOUR,
"MST": -7 * SECONDS_PER_HOUR,
"MDT": -6 * SECONDS_PER_HOUR,
"CST": -6 * SECONDS_PER_HOUR,
"CDT": -5 * SECONDS_PER_HOUR,
"EST": -5 * SECONDS_PER_HOUR,
"EDT": -4 * SECONDS_PER_HOUR,
"AST": -4 * SECONDS_PER_HOUR,
"ADT": -3 * SECONDS_PER_HOUR,
}

@@ -12,7 +12,6 @@ from pyairnow.errors import AirNowError

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util

from .const import (
ATTR_API_AQI,

@@ -27,7 +26,6 @@ from .const import (
ATTR_API_REPORT_DATE,
ATTR_API_REPORT_HOUR,
ATTR_API_REPORT_TZ,
ATTR_API_REPORT_TZINFO,
ATTR_API_STATE,
ATTR_API_STATION,
ATTR_API_STATION_LATITUDE,

@@ -98,9 +96,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# Copy Report Details
data[ATTR_API_REPORT_DATE] = obv[ATTR_API_REPORT_DATE]
data[ATTR_API_REPORT_HOUR] = obv[ATTR_API_REPORT_HOUR]
data[ATTR_API_REPORT_TZINFO] = await dt_util.async_get_time_zone(
obv[ATTR_API_REPORT_TZ]
)
data[ATTR_API_REPORT_TZ] = obv[ATTR_API_REPORT_TZ]

# Copy Station Details
data[ATTR_API_STATE] = obv[ATTR_API_STATE]

@@ -4,9 +4,10 @@ from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from typing import Any

from dateutil import parser

from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,

@@ -34,12 +35,13 @@ from .const import (
ATTR_API_PM25,
ATTR_API_REPORT_DATE,
ATTR_API_REPORT_HOUR,
ATTR_API_REPORT_TZINFO,
ATTR_API_REPORT_TZ,
ATTR_API_STATION,
ATTR_API_STATION_LATITUDE,
ATTR_API_STATION_LONGITUDE,
DEFAULT_NAME,
DOMAIN,
US_TZ_OFFSETS,
)

ATTRIBUTION = "Data provided by AirNow"

@@ -69,6 +71,18 @@ def station_extra_attrs(data: dict[str, Any]) -> dict[str, Any]:
return {}


def aqi_extra_attrs(data: dict[str, Any]) -> dict[str, Any]:
"""Process extra attributes for main AQI sensor."""
return {
ATTR_DESCR: data[ATTR_API_AQI_DESCRIPTION],
ATTR_LEVEL: data[ATTR_API_AQI_LEVEL],
ATTR_TIME: parser.parse(
f"{data[ATTR_API_REPORT_DATE]} {data[ATTR_API_REPORT_HOUR]}:00 {data[ATTR_API_REPORT_TZ]}",
tzinfos=US_TZ_OFFSETS,
).isoformat(),
}


SENSOR_TYPES: tuple[AirNowEntityDescription, ...] = (
AirNowEntityDescription(
key=ATTR_API_AQI,

@@ -76,16 +90,7 @@ SENSOR_TYPES: tuple[AirNowEntityDescription, ...] = (
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.AQI,
value_fn=lambda data: data.get(ATTR_API_AQI),
extra_state_attributes_fn=lambda data: {
ATTR_DESCR: data[ATTR_API_AQI_DESCRIPTION],
ATTR_LEVEL: data[ATTR_API_AQI_LEVEL],
ATTR_TIME: datetime.strptime(
f"{data[ATTR_API_REPORT_DATE]} {data[ATTR_API_REPORT_HOUR]}",
"%Y-%m-%d %H",
)
.replace(tzinfo=data[ATTR_API_REPORT_TZINFO])
.isoformat(),
},
extra_state_attributes_fn=aqi_extra_attrs,
),
AirNowEntityDescription(
key=ATTR_API_PM10,
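
Note: the sensor hunk above drops the strptime/replace(tzinfo=...) construction in favour of dateutil.parser.parse with the US_TZ_OFFSETS table, because AirNow reports only a bare time-zone abbreviation. With dateutil, tzinfos maps such abbreviations to UTC offsets in seconds. A standalone sketch with sample values:

    # Standalone sketch: dateutil's tzinfos maps time-zone abbreviations to
    # UTC offsets in seconds, mirroring the US_TZ_OFFSETS table above.
    from dateutil import parser

    SECONDS_PER_HOUR = 3600
    US_TZ_OFFSETS = {
        "EST": -5 * SECONDS_PER_HOUR,
        "EDT": -4 * SECONDS_PER_HOUR,
    }

    # AirNow-style report: observation date, hour, and bare abbreviation.
    ts = parser.parse("2024-10-01 14:00 EDT", tzinfos=US_TZ_OFFSETS)
    print(ts.isoformat())  # 2024-10-01T14:00:00-04:00
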

@@ -34,13 +34,8 @@ from homeassistant.helpers import (
device_registry as dr,
entity_registry as er,
)
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import (
CONF_CITY,

@@ -403,39 +398,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -
async def async_reload_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> None:
"""Handle an options update."""
await hass.config_entries.async_reload(entry.entry_id)


class AirVisualEntity(CoordinatorEntity):
"""Define a generic AirVisual entity."""

def __init__(
self,
coordinator: DataUpdateCoordinator,
entry: ConfigEntry,
description: EntityDescription,
) -> None:
"""Initialize."""
super().__init__(coordinator)

self._attr_extra_state_attributes = {}
self._entry = entry
self.entity_description = description

async def async_added_to_hass(self) -> None:
"""Register callbacks."""
await super().async_added_to_hass()

@callback
def update() -> None:
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()

self.async_on_remove(self.coordinator.async_add_listener(update))

self.update_from_latest_data()

@callback
def update_from_latest_data(self) -> None:
"""Update the entity from the latest data."""
raise NotImplementedError

@@ -141,11 +141,7 @@ class AirVisualFlowHandler(ConfigFlow, domain=DOMAIN):
valid_keys.add(user_input[CONF_API_KEY])

if existing_entry := await self.async_set_unique_id(self._geo_id):
self.hass.config_entries.async_update_entry(existing_entry, data=user_input)
self.hass.async_create_task(
self.hass.config_entries.async_reload(existing_entry.entry_id)
)
return self.async_abort(reason="reauth_successful")
return self.async_update_reload_and_abort(existing_entry, data=user_input)

return self.async_create_entry(
title=f"Cloud API ({self._geo_id})",
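
Note: the Abode, AirVisual and AirVisual Pro flows in this commit all collapse the manual update-entry / schedule-reload / abort sequence into the single config-flow helper async_update_reload_and_abort. A sketch of the resulting reauth step, using a hypothetical ExampleFlowHandler for illustration:

    # Sketch of the consolidated reauth step (hypothetical ExampleFlowHandler;
    # the helper methods are the ConfigFlow APIs used in the hunks above).
    from typing import Any

    from homeassistant.config_entries import ConfigFlow, ConfigFlowResult


    class ExampleFlowHandler(ConfigFlow, domain="example"):
        async def async_step_reauth_confirm(
            self, user_input: dict[str, Any]
        ) -> ConfigFlowResult:
            entry = self._get_reauth_entry()
            # One helper updates the entry data, schedules the reload and
            # aborts the flow with reason "reauth_successful", replacing the
            # hand-written async_update_entry / async_create_task / async_abort.
            return self.async_update_reload_and_abort(
                entry, data=entry.data | user_input
            )
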

homeassistant/components/airvisual/entity.py (new file, 47 lines)

@@ -0,0 +1,47 @@
"""The AirVisual component."""

from __future__ import annotations

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import callback
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)


class AirVisualEntity(CoordinatorEntity):
"""Define a generic AirVisual entity."""

def __init__(
self,
coordinator: DataUpdateCoordinator,
entry: ConfigEntry,
description: EntityDescription,
) -> None:
"""Initialize."""
super().__init__(coordinator)

self._attr_extra_state_attributes = {}
self._entry = entry
self.entity_description = description

async def async_added_to_hass(self) -> None:
"""Register callbacks."""
await super().async_added_to_hass()

@callback
def update() -> None:
"""Update the state."""
self.update_from_latest_data()
self.async_write_ha_state()

self.async_on_remove(self.coordinator.async_add_listener(update))

self.update_from_latest_data()

@callback
def update_from_latest_data(self) -> None:
"""Update the entity from the latest data."""
raise NotImplementedError

@@ -26,8 +26,9 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from . import AirVisualConfigEntry, AirVisualEntity
from . import AirVisualConfigEntry
from .const import CONF_CITY
from .entity import AirVisualEntity

ATTR_CITY = "city"
ATTR_COUNTRY = "country"

@@ -24,15 +24,9 @@ from homeassistant.const import (
)
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, LOGGER
from .const import LOGGER

PLATFORMS = [Platform.SENSOR]

@@ -120,28 +114,3 @@ async def async_unload_entry(
await entry.runtime_data.node.async_disconnect()

return unload_ok


class AirVisualProEntity(CoordinatorEntity):
"""Define a generic AirVisual Pro entity."""

def __init__(
self, coordinator: DataUpdateCoordinator, description: EntityDescription
) -> None:
"""Initialize."""
super().__init__(coordinator)

self._attr_unique_id = f"{coordinator.data['serial_number']}_{description.key}"
self.entity_description = description

@property
def device_info(self) -> DeviceInfo:
"""Return device registry information for this entity."""
return DeviceInfo(
identifiers={(DOMAIN, self.coordinator.data["serial_number"])},
manufacturer="AirVisual",
model=self.coordinator.data["status"]["model"],
name=self.coordinator.data["settings"]["node_name"],
hw_version=self.coordinator.data["status"]["system_version"],
sw_version=self.coordinator.data["status"]["app_version"],
)

@@ -76,9 +76,7 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):

VERSION = 1

def __init__(self) -> None:
"""Initialize."""
self._reauth_entry: ConfigEntry | None = None
_reauth_entry: ConfigEntry

async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import a config entry from `airvisual` integration (see #83882)."""

@@ -88,9 +86,7 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle configuration by re-auth."""
self._reauth_entry = self.hass.config_entries.async_get_entry(
self.context["entry_id"]
)
self._reauth_entry = self._get_reauth_entry()
return await self.async_step_reauth_confirm()

async def async_step_reauth_confirm(

@@ -102,8 +98,6 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):
step_id="reauth_confirm", data_schema=STEP_REAUTH_SCHEMA
)

assert self._reauth_entry

validation_result = await async_validate_credentials(
self._reauth_entry.data[CONF_IP_ADDRESS], user_input[CONF_PASSWORD]
)

@@ -115,13 +109,9 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN):
errors=validation_result.errors,
)

self.hass.config_entries.async_update_entry(
return self.async_update_reload_and_abort(
self._reauth_entry, data=self._reauth_entry.data | user_input
)
self.hass.async_create_task(
self.hass.config_entries.async_reload(self._reauth_entry.entry_id)
)
return self.async_abort(reason="reauth_successful")

async def async_step_user(
self, user_input: dict[str, str] | None = None

homeassistant/components/airvisual_pro/entity.py (new file, 37 lines)

@@ -0,0 +1,37 @@
"""The AirVisual Pro integration."""

from __future__ import annotations

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)

from .const import DOMAIN


class AirVisualProEntity(CoordinatorEntity):
"""Define a generic AirVisual Pro entity."""

def __init__(
self, coordinator: DataUpdateCoordinator, description: EntityDescription
) -> None:
"""Initialize."""
super().__init__(coordinator)

self._attr_unique_id = f"{coordinator.data['serial_number']}_{description.key}"
self.entity_description = description

@property
def device_info(self) -> DeviceInfo:
"""Return device registry information for this entity."""
return DeviceInfo(
identifiers={(DOMAIN, self.coordinator.data["serial_number"])},
manufacturer="AirVisual",
model=self.coordinator.data["status"]["model"],
name=self.coordinator.data["settings"]["node_name"],
hw_version=self.coordinator.data["status"]["system_version"],
sw_version=self.coordinator.data["status"]["app_version"],
)

@@ -22,7 +22,8 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AirVisualProConfigEntry, AirVisualProEntity
from . import AirVisualProConfigEntry
from .entity import AirVisualProEntity


@dataclass(frozen=True, kw_only=True)

@@ -3,10 +3,11 @@
from __future__ import annotations

from datetime import timedelta
from functools import cached_property, partial
from functools import partial
import logging
from typing import Any, Final, final

from propcache import cached_property
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry

@@ -33,6 +34,7 @@ from homeassistant.helpers.deprecation import (
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey

from .const import ( # noqa: F401
_DEPRECATED_FORMAT_NUMBER,

@@ -52,6 +54,7 @@ from .const import ( # noqa: F401

_LOGGER: Final = logging.getLogger(__name__)

DATA_COMPONENT: HassKey[EntityComponent[AlarmControlPanelEntity]] = HassKey(DOMAIN)
ENTITY_ID_FORMAT: Final = DOMAIN + ".{}"
PLATFORM_SCHEMA: Final = cv.PLATFORM_SCHEMA
PLATFORM_SCHEMA_BASE: Final = cv.PLATFORM_SCHEMA_BASE

@@ -69,7 +72,7 @@ ALARM_SERVICE_SCHEMA: Final = make_entity_service_schema(

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Track states and offer events for sensors."""
component = hass.data[DOMAIN] = EntityComponent[AlarmControlPanelEntity](
component = hass.data[DATA_COMPONENT] = EntityComponent[AlarmControlPanelEntity](
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)

@@ -122,14 +125,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
component: EntityComponent[AlarmControlPanelEntity] = hass.data[DOMAIN]
return await component.async_setup_entry(entry)
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
component: EntityComponent[AlarmControlPanelEntity] = hass.data[DOMAIN]
return await component.async_unload_entry(entry)
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)


class AlarmControlPanelEntityDescription(EntityDescription, frozen_or_thawed=True):

@@ -157,7 +157,7 @@ class AlarmDecoderFlowHandler(ConfigFlow, domain=DOMAIN):
class AlarmDecoderOptionsFlowHandler(OptionsFlow):
"""Handle AlarmDecoder options."""

selected_zone: str | None = None
selected_zone: str

def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize AlarmDecoder options flow."""

@@ -29,6 +29,7 @@ from homeassistant.components.alarm_control_panel import (
CodeFormat,
)
from homeassistant.components.climate import HVACMode
from homeassistant.components.lock import LockState
from homeassistant.const import (
ATTR_CODE_FORMAT,
ATTR_SUPPORTED_FEATURES,

@@ -40,16 +41,12 @@ from homeassistant.const import (
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_IDLE,
STATE_LOCKED,
STATE_LOCKING,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
STATE_UNLOCKED,
STATE_UNLOCKING,
UnitOfLength,
UnitOfMass,
UnitOfTemperature,

@@ -500,10 +497,10 @@ class AlexaLockController(AlexaCapability):
raise UnsupportedProperty(name)

# If its unlocking its still locked and not unlocked yet
if self.entity.state in (STATE_UNLOCKING, STATE_LOCKED):
if self.entity.state in (LockState.UNLOCKING, LockState.LOCKED):
return "LOCKED"
# If its locking its still unlocked and not locked yet
if self.entity.state in (STATE_LOCKING, STATE_UNLOCKED):
if self.entity.state in (LockState.LOCKING, LockState.UNLOCKED):
return "UNLOCKED"
return "JAMMED"
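
Note: the Alexa hunk above replaces the deprecated STATE_LOCKED / STATE_UNLOCKING string constants with the LockState enum from the lock component. The enum members are string-valued, so membership checks against a plain entity state keep working. A minimal sketch, assuming a Home Assistant environment:

    # Minimal sketch: LockState members are string-valued, so they compare
    # equal to the plain state strings the old STATE_* constants held.
    from homeassistant.components.lock import LockState

    state = "unlocking"  # e.g. self.entity.state

    if state in (LockState.UNLOCKING, LockState.LOCKED):
        report = "LOCKED"
    elif state in (LockState.LOCKING, LockState.UNLOCKED):
        report = "UNLOCKED"
    else:
        report = "JAMMED"
    print(report)  # LOCKED
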

@@ -10,12 +10,15 @@ from homeassistant.core import Event, HassJob, HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey

from .analytics import Analytics
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN)


async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
"""Set up the analytics integration."""

@@ -52,7 +55,7 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
websocket_api.async_register_command(hass, websocket_analytics)
websocket_api.async_register_command(hass, websocket_analytics_preferences)

hass.data[DOMAIN] = analytics
hass.data[DATA_COMPONENT] = analytics
return True


@@ -65,7 +68,7 @@ def websocket_analytics(
msg: dict[str, Any],
) -> None:
"""Return analytics preferences."""
analytics: Analytics = hass.data[DOMAIN]
analytics = hass.data[DATA_COMPONENT]
connection.send_result(
msg["id"],
{ATTR_PREFERENCES: analytics.preferences, ATTR_ONBOARDED: analytics.onboarded},

@@ -87,7 +90,7 @@ async def websocket_analytics_preferences(
) -> None:
"""Update analytics preferences."""
preferences = msg[ATTR_PREFERENCES]
analytics: Analytics = hass.data[DOMAIN]
analytics = hass.data[DATA_COMPONENT]

await analytics.save_preferences(preferences)
await analytics.send_analytics()
@@ -261,18 +261,19 @@ class Analytics:
|
||||
integrations.append(integration.domain)
|
||||
|
||||
if supervisor_info is not None:
|
||||
supervisor_client = hassio.get_supervisor_client(hass)
|
||||
installed_addons = await asyncio.gather(
|
||||
*(
|
||||
hassio.async_get_addon_info(hass, addon[ATTR_SLUG])
|
||||
supervisor_client.addons.addon_info(addon[ATTR_SLUG])
|
||||
for addon in supervisor_info[ATTR_ADDONS]
|
||||
)
|
||||
)
|
||||
addons.extend(
|
||||
{
|
||||
ATTR_SLUG: addon[ATTR_SLUG],
|
||||
ATTR_PROTECTED: addon[ATTR_PROTECTED],
|
||||
ATTR_VERSION: addon[ATTR_VERSION],
|
||||
ATTR_AUTO_UPDATE: addon[ATTR_AUTO_UPDATE],
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_PROTECTED: addon.protected,
|
||||
ATTR_VERSION: addon.version,
|
||||
ATTR_AUTO_UPDATE: addon.auto_update,
|
||||
}
|
||||
for addon in installed_addons
|
||||
)
|
||||
|
@@ -31,6 +31,8 @@ if TYPE_CHECKING:
class AnalyticsData:
"""Analytics data class."""

active_installations: int
reports_integrations: int
core_integrations: dict[str, int]
custom_integrations: dict[str, int]

@@ -76,7 +78,12 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
integration: get_custom_integration_value(custom_data, integration)
for integration in self._tracked_custom_integrations
}
return AnalyticsData(core_integrations, custom_integrations)
return AnalyticsData(
data.active_installations,
data.reports_integrations,
core_integrations,
custom_integrations,
)

def get_custom_integration_value(

@@ -6,6 +6,12 @@
},
"custom_integrations": {
"default": "mdi:puzzle-edit"
},
"total_active_installations": {
"default": "mdi:puzzle"
},
"total_reports_integrations": {
"default": "mdi:puzzle"
}
}
}

@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["python_homeassistant_analytics"],
"requirements": ["python-homeassistant-analytics==0.7.0"],
"requirements": ["python-homeassistant-analytics==0.8.0"],
"single_config_entry": true
}

@@ -57,6 +57,26 @@ def get_custom_integration_entity_description(
)

GENERAL_SENSORS = [
AnalyticsSensorEntityDescription(
key="total_active_installations",
translation_key="total_active_installations",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL,
native_unit_of_measurement="active installations",
value_fn=lambda data: data.active_installations,
),
AnalyticsSensorEntityDescription(
key="total_reports_integrations",
translation_key="total_reports_integrations",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL,
native_unit_of_measurement="active installations",
value_fn=lambda data: data.reports_integrations,
),
]

async def async_setup_entry(
hass: HomeAssistant,
entry: AnalyticsInsightsConfigEntry,
@@ -85,6 +105,12 @@ async def async_setup_entry(
)
for integration_domain in coordinator.data.custom_integrations
)

entities.extend(
HomeassistantAnalyticsSensor(coordinator, entity_description)
for entity_description in GENERAL_SENSORS
)

async_add_entities(entities)

@@ -44,6 +44,12 @@
"sensor": {
"custom_integrations": {
"name": "{custom_integration_domain} (custom)"
},
"total_active_installations": {
"name": "Total active installations"
},
"total_reports_integrations": {
"name": "Total reported integrations"
}
}
}

@@ -4,7 +4,6 @@ from __future__ import annotations

from pydroid_ipcam import PyDroidIPCam

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
@@ -15,8 +14,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN
from .coordinator import AndroidIPCamDataUpdateCoordinator
from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator

PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
@@ -26,7 +24,9 @@ PLATFORMS: list[Platform] = [
]

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(
hass: HomeAssistant, entry: AndroidIPCamConfigEntry
) -> bool:
"""Set up Android IP Webcam from a config entry."""
websession = async_get_clientsession(hass)
cam = PyDroidIPCam(
@@ -40,16 +40,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
coordinator = AndroidIPCamDataUpdateCoordinator(hass, entry, cam)
await coordinator.async_config_entry_first_refresh()

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
entry.runtime_data = coordinator

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(
hass: HomeAssistant, entry: AndroidIPCamConfigEntry
) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)

return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -7,12 +7,11 @@ from homeassistant.components.binary_sensor import (
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN, MOTION_ACTIVE
from .coordinator import AndroidIPCamDataUpdateCoordinator
from .const import MOTION_ACTIVE
from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator
from .entity import AndroidIPCamBaseEntity

BINARY_SENSOR_DESCRIPTION = BinarySensorEntityDescription(
@@ -24,16 +23,12 @@ BINARY_SENSOR_DESCRIPTION = BinarySensorEntityDescription(

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AndroidIPCamConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the IP Webcam sensors from config entry."""

coordinator: AndroidIPCamDataUpdateCoordinator = hass.data[DOMAIN][
config_entry.entry_id
]

async_add_entities([IPWebcamBinarySensor(coordinator)])
async_add_entities([IPWebcamBinarySensor(config_entry.runtime_data)])

class IPWebcamBinarySensor(AndroidIPCamBaseEntity, BinarySensorEntity):

@@ -3,7 +3,6 @@
from __future__ import annotations

from homeassistant.components.mjpeg import MjpegCamera, filter_urllib3_logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
@@ -15,21 +14,17 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import AndroidIPCamDataUpdateCoordinator
from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AndroidIPCamConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the IP Webcam camera from config entry."""
filter_urllib3_logging()
coordinator: AndroidIPCamDataUpdateCoordinator = hass.data[DOMAIN][
config_entry.entry_id
]

async_add_entities([IPWebcamCamera(coordinator)])
async_add_entities([IPWebcamCamera(config_entry.runtime_data)])

class IPWebcamCamera(MjpegCamera):

@@ -15,19 +15,22 @@ from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type AndroidIPCamConfigEntry = ConfigEntry[AndroidIPCamDataUpdateCoordinator]

class AndroidIPCamDataUpdateCoordinator(DataUpdateCoordinator[None]):
"""Coordinator class for the Android IP Webcam."""

config_entry: AndroidIPCamConfigEntry

def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AndroidIPCamConfigEntry,
cam: PyDroidIPCam,
) -> None:
"""Initialize the Android IP Webcam."""
self.hass = hass
self.config_entry: ConfigEntry = config_entry
self.cam = cam
super().__init__(
self.hass,

@@ -13,14 +13,12 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType

from .const import DOMAIN
from .coordinator import AndroidIPCamDataUpdateCoordinator
from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator
from .entity import AndroidIPCamBaseEntity

@@ -120,19 +118,21 @@ SENSOR_TYPES: tuple[AndroidIPWebcamSensorEntityDescription, ...] = (

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AndroidIPCamConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the IP Webcam sensors from config entry."""

coordinator: AndroidIPCamDataUpdateCoordinator = hass.data[DOMAIN][
config_entry.entry_id
]
coordinator = config_entry.runtime_data
sensor_types = [
sensor
for sensor in SENSOR_TYPES
if sensor.key
in [*coordinator.cam.enabled_sensors, "audio_connections", "video_connections"]
in [
*coordinator.cam.enabled_sensors,
"audio_connections",
"video_connections",
]
]
async_add_entities(
IPWebcamSensor(coordinator, description) for description in sensor_types

@@ -9,13 +9,11 @@ from typing import Any
from pydroid_ipcam import PyDroidIPCam

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import AndroidIPCamDataUpdateCoordinator
from .coordinator import AndroidIPCamConfigEntry, AndroidIPCamDataUpdateCoordinator
from .entity import AndroidIPCamBaseEntity

@@ -113,14 +111,12 @@ SWITCH_TYPES: tuple[AndroidIPWebcamSwitchEntityDescription, ...] = (

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AndroidIPCamConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the IP Webcam switches from config entry."""

coordinator: AndroidIPCamDataUpdateCoordinator = hass.data[DOMAIN][
config_entry.entry_id
]
coordinator = config_entry.runtime_data
switch_types = [
switch
for switch in SWITCH_TYPES

@@ -16,6 +16,7 @@ import voluptuous as vol

from homeassistant.components import zeroconf
from homeassistant.config_entries import (
SOURCE_REAUTH,
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
@@ -58,13 +59,11 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):

VERSION = 1

def __init__(self) -> None:
"""Initialize a new AndroidTVRemoteConfigFlow."""
self.api: AndroidTVRemote | None = None
self.reauth_entry: ConfigEntry | None = None
self.host: str | None = None
self.name: str | None = None
self.mac: str | None = None
api: AndroidTVRemote
host: str
name: str
mac: str
reauth_entry: ConfigEntry

async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -72,13 +71,11 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self.host = user_input["host"]
assert self.host
self.host = user_input[CONF_HOST]
api = create_api(self.hass, self.host, enable_ime=False)
try:
await api.async_generate_cert_if_missing()
self.name, self.mac = await api.async_get_name_and_mac()
assert self.mac
await self.async_set_unique_id(format_mac(self.mac))
self._abort_if_unique_id_configured(updates={CONF_HOST: self.host})
return await self._async_start_pair()
@@ -94,7 +91,6 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):

async def _async_start_pair(self) -> ConfigFlowResult:
"""Start pairing with the Android TV. Navigate to the pair flow to enter the PIN shown on screen."""
assert self.host
self.api = create_api(self.hass, self.host, enable_ime=False)
await self.api.async_generate_cert_if_missing()
await self.api.async_start_pairing()
@@ -108,14 +104,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input is not None:
try:
pin = user_input["pin"]
assert self.api
await self.api.async_finish_pairing(pin)
if self.reauth_entry:
if self.source == SOURCE_REAUTH:
await self.hass.config_entries.async_reload(
self.reauth_entry.entry_id
)
return self.async_abort(reason="reauth_successful")
assert self.name
return self.async_create_entry(
title=self.name,
data={
@@ -155,9 +149,9 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.debug("Android TV device found via zeroconf: %s", discovery_info)
self.host = discovery_info.host
self.name = discovery_info.name.removesuffix("._androidtvremote2._tcp.local.")
self.mac = discovery_info.properties.get("bt")
if not self.mac:
if not (mac := discovery_info.properties.get("bt")):
return self.async_abort(reason="cannot_connect")
self.mac = mac
await self.async_set_unique_id(format_mac(self.mac))
self._abort_if_unique_id_configured(
updates={CONF_HOST: self.host, CONF_NAME: self.name}
@@ -189,9 +183,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
self.host = entry_data[CONF_HOST]
self.name = entry_data[CONF_NAME]
self.mac = entry_data[CONF_MAC]
self.reauth_entry = self.hass.config_entries.async_get_entry(
self.context["entry_id"]
)
self.reauth_entry = self._get_reauth_entry()
return await self.async_step_reauth_confirm()

async def async_step_reauth_confirm(

@@ -23,7 +23,7 @@ class AOSmithConfigFlow(ConfigFlow, domain=DOMAIN):

VERSION = 1

_reauth_email: str | None = None
_reauth_email: str

async def _async_validate_credentials(
self, email: str, password: str
@@ -85,21 +85,17 @@ class AOSmithConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle user's reauth credentials."""
errors: dict[str, str] = {}
if user_input is not None and self._reauth_email is not None:
email = self._reauth_email
if user_input:
password = user_input[CONF_PASSWORD]
entry_id = self.context["entry_id"]

if entry := self.hass.config_entries.async_get_entry(entry_id):
error = await self._async_validate_credentials(email, password)
if error is None:
self.hass.config_entries.async_update_entry(
entry,
data=entry.data | user_input,
)
await self.hass.config_entries.async_reload(entry.entry_id)
return self.async_abort(reason="reauth_successful")
errors["base"] = error
entry = self._get_reauth_entry()
error = await self._async_validate_credentials(self._reauth_email, password)
if error is None:
return self.async_update_reload_and_abort(
entry,
data=entry.data | user_input,
)
errors["base"] = error

return self.async_show_form(
step_id="reauth_confirm",

@@ -53,7 +53,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Activate the Apache Kafka integration."""
conf = config[DOMAIN]

kafka = hass.data[DOMAIN] = KafkaManager(
kafka = KafkaManager(
hass,
conf[CONF_IP_ADDRESS],
conf[CONF_PORT],

@@ -8,7 +8,7 @@ from collections.abc import Awaitable, Callable, Mapping
from ipaddress import ip_address
import logging
from random import randrange
from typing import Any
from typing import Any, Self

from pyatv import exceptions, pair, scan
from pyatv.const import DeviceModel, PairingRequirement, Protocol
@@ -98,8 +98,11 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1

scan_filter: str | None = None
all_identifiers: set[str]
atv: BaseConfig | None = None
atv_identifiers: list[str] | None = None
_host: str # host in zeroconf discovery info, should not be accessed by other flows
host: str | None = None # set by _async_aggregate_discoveries, for other flows
protocol: Protocol | None = None
pairing: PairingHandler | None = None
protocols_to_pair: deque[Protocol] | None = None
@@ -157,7 +160,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
"type": "Apple TV",
}
self.scan_filter = self.unique_id
self.context["identifier"] = self.unique_id
return await self.async_step_restore_device()

async def async_step_restore_device(
@@ -192,7 +194,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
self.device_identifier, raise_on_progress=False
)
assert self.atv
self.context["all_identifiers"] = self.atv.all_identifiers
self.all_identifiers = set(self.atv.all_identifiers)
return await self.async_step_confirm()

return self.async_show_form(
@@ -207,7 +209,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle device found via zeroconf."""
if discovery_info.ip_address.version == 6:
return self.async_abort(reason="ipv6_not_supported")
host = discovery_info.host
self._host = host = discovery_info.host
service_type = discovery_info.type[:-1] # Remove leading .
name = discovery_info.name.replace(f".{service_type}.", "")
properties = discovery_info.properties
@@ -255,7 +257,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
# as two separate flows.
#
# To solve this, all identifiers are stored as
# "all_identifiers" in the flow context. When a new service is discovered, the
# "all_identifiers" in the flow. When a new service is discovered, the
# code below will check these identifiers for all active flows and abort if a
# match is found. Before aborting, the original flow is updated with any
# potentially new identifiers. In the example above, when service C is
@@ -277,32 +279,32 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
self._async_check_and_update_in_progress(host, unique_id)
# Host must only be set AFTER checking and updating in progress
# flows or we will have a race condition where no flows move forward.
self.context[CONF_ADDRESS] = host
self.host = host

@callback
def _async_check_and_update_in_progress(self, host: str, unique_id: str) -> None:
"""Check for in-progress flows and update them with identifiers if needed."""
for flow in self._async_in_progress(include_uninitialized=True):
context = flow["context"]
if (
context.get("source") != SOURCE_ZEROCONF
or context.get(CONF_ADDRESS) != host
):
continue
if (
"all_identifiers" in context
and unique_id not in context["all_identifiers"]
):
# Add potentially new identifiers from this device to the existing flow
context["all_identifiers"].append(unique_id)
if self.hass.config_entries.flow.async_has_matching_flow(self):
raise AbortFlow("already_in_progress")

def is_matching(self, other_flow: Self) -> bool:
"""Return True if other_flow is matching this flow."""
if (
other_flow.context.get("source") != SOURCE_ZEROCONF
or other_flow.host != self._host
):
return False
if self.unique_id is not None:
# Add potentially new identifiers from this device to the existing flow
other_flow.all_identifiers.add(self.unique_id)
return True

async def async_found_zeroconf_device(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
"""Handle device found after Zeroconf discovery."""
assert self.atv
self.context["all_identifiers"] = self.atv.all_identifiers
self.all_identifiers = set(self.atv.all_identifiers)
# Also abort if an integration with this identifier already exists
await self.async_set_unique_id(self.device_identifier)
# but be sure to update the address if its changed so the scanner
@@ -310,7 +312,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured(
updates={CONF_ADDRESS: str(self.atv.address)}
)
self.context["identifier"] = self.unique_id
return await self.async_step_confirm()

async def async_find_device_wrapper(
@@ -390,7 +391,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle user-confirmation of discovered node."""
assert self.atv
if user_input is not None:
expected_identifier_count = len(self.context["all_identifiers"])
expected_identifier_count = len(self.all_identifiers)
# If number of services found during device scan mismatch number of
# identifiers collected during Zeroconf discovery, then trigger a new scan
# with hopes of finding all services.

@@ -36,6 +36,7 @@ from homeassistant.loader import (
async_get_integration,
)
from homeassistant.util import slugify
from homeassistant.util.hass_dict import HassKey

__all__ = ["ClientCredential", "AuthorizationServer", "async_import_client_credential"]

@@ -45,7 +46,7 @@ DOMAIN = "application_credentials"

STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
DATA_STORAGE = "storage"
DATA_COMPONENT: HassKey[ApplicationCredentialsStorageCollection] = HassKey(DOMAIN)
CONF_AUTH_DOMAIN = "auth_domain"
DEFAULT_IMPORT_NAME = "Import from configuration.yaml"

@@ -150,7 +151,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
id_manager,
)
await storage_collection.async_load()
hass.data[DOMAIN][DATA_STORAGE] = storage_collection
hass.data[DATA_COMPONENT] = storage_collection

collection.DictStorageCollectionWebsocket(
storage_collection, DOMAIN, DOMAIN, CREATE_FIELDS, UPDATE_FIELDS
@@ -175,7 +176,6 @@ async def async_import_client_credential(
"""Import an existing credential from configuration.yaml."""
if DOMAIN not in hass.data:
raise ValueError("Integration 'application_credentials' not setup")
storage_collection = hass.data[DOMAIN][DATA_STORAGE]
item = {
CONF_DOMAIN: domain,
CONF_CLIENT_ID: credential.client_id,
@@ -183,7 +183,7 @@ async def async_import_client_credential(
CONF_AUTH_DOMAIN: auth_domain if auth_domain else domain,
}
item[CONF_NAME] = credential.name if credential.name else DEFAULT_IMPORT_NAME
await storage_collection.async_import_item(item)
await hass.data[DATA_COMPONENT].async_import_item(item)

class AuthImplementation(config_entry_oauth2_flow.LocalOAuth2Implementation):
@@ -222,8 +222,7 @@ async def _async_provide_implementation(
if not platform:
return []

storage_collection = hass.data[DOMAIN][DATA_STORAGE]
credentials = storage_collection.async_client_credentials(domain)
credentials = hass.data[DATA_COMPONENT].async_client_credentials(domain)
if hasattr(platform, "async_get_auth_implementation"):
return [
await platform.async_get_auth_implementation(hass, auth_domain, credential)
@@ -246,8 +245,7 @@ async def _async_config_entry_app_credentials(
):
return None

storage_collection = hass.data[DOMAIN][DATA_STORAGE]
for item in storage_collection.async_items():
for item in hass.data[DATA_COMPONENT].async_items():
item_id = item[CONF_ID]
if (
item[CONF_DOMAIN] == config_entry.domain

@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/apprise",
"iot_class": "cloud_push",
"loggers": ["apprise"],
"requirements": ["apprise==1.8.0"]
"requirements": ["apprise==1.9.0"]
}

@@ -6,14 +6,12 @@ import logging

from pyaprilaire.const import Attribute

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.device_registry import format_mac

from .const import DOMAIN
from .coordinator import AprilaireCoordinator
from .coordinator import AprilaireConfigEntry, AprilaireCoordinator

PLATFORMS: list[Platform] = [
Platform.CLIMATE,
@@ -25,7 +23,7 @@ PLATFORMS: list[Platform] = [
_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: AprilaireConfigEntry) -> bool:
"""Set up a config entry for Aprilaire."""

host = entry.data[CONF_HOST]
@@ -34,15 +32,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
coordinator = AprilaireCoordinator(hass, entry.unique_id, host, port)
await coordinator.start_listen()

hass.data.setdefault(DOMAIN, {})[entry.unique_id] = coordinator

async def ready_callback(ready: bool):
async def ready_callback(ready: bool) -> None:
if ready:
mac_address = format_mac(coordinator.data[Attribute.MAC_ADDRESS])

if mac_address != entry.unique_id:
raise ConfigEntryAuthFailed("Invalid MAC address")

entry.runtime_data = coordinator
entry.async_on_unload(coordinator.stop_listen)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

async def _async_close(_: Event) -> None:
@@ -63,12 +62,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AprilaireConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

if unload_ok:
coordinator: AprilaireCoordinator = hass.data[DOMAIN].pop(entry.unique_id)
coordinator.stop_listen()

return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -16,19 +16,17 @@ from homeassistant.components.climate import (
HVACAction,
HVACMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import (
DOMAIN,
FAN_CIRCULATE,
PRESET_PERMANENT_HOLD,
PRESET_TEMPORARY_HOLD,
PRESET_VACATION,
)
from .coordinator import AprilaireCoordinator
from .coordinator import AprilaireConfigEntry
from .entity import BaseAprilaireEntity

HVAC_MODE_MAP = {
@@ -64,14 +62,14 @@ FAN_MODE_MAP = {

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AprilaireConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Add climates for passed config_entry in HA."""

coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id]

async_add_entities([AprilaireClimate(coordinator, config_entry.unique_id)])
async_add_entities(
[AprilaireClimate(config_entry.runtime_data, config_entry.unique_id)]
)

class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):

@@ -9,6 +9,7 @@ from typing import Any
import pyaprilaire.client
from pyaprilaire.const import MODELS, Attribute, FunctionalDomain

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
@@ -22,6 +23,8 @@ WAIT_TIMEOUT = 30

_LOGGER = logging.getLogger(__name__)

type AprilaireConfigEntry = ConfigEntry[AprilaireCoordinator]

class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
"""Coordinator for interacting with the thermostat."""
@@ -112,7 +115,7 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
self.client.stop_listen()

async def wait_for_ready(
self, ready_callback: Callable[[bool], Awaitable[bool]]
self, ready_callback: Callable[[bool], Awaitable[None]]
) -> bool:
"""Wait for the client to be ready."""

@@ -14,13 +14,11 @@ from homeassistant.components.humidifier import (
HumidifierEntity,
HumidifierEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType

from .const import DOMAIN
from .coordinator import AprilaireCoordinator
from .coordinator import AprilaireConfigEntry, AprilaireCoordinator
from .entity import BaseAprilaireEntity

HUMIDIFIER_ACTION_MAP: dict[StateType, HumidifierAction] = {
@@ -41,12 +39,12 @@ DEHUMIDIFIER_ACTION_MAP: dict[StateType, HumidifierAction] = {

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AprilaireConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Aprilaire humidifier devices."""

coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id]
coordinator = config_entry.runtime_data

assert config_entry.unique_id is not None

@@ -9,12 +9,10 @@ from typing import cast
from pyaprilaire.const import Attribute

from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import AprilaireCoordinator
from .coordinator import AprilaireConfigEntry, AprilaireCoordinator
from .entity import BaseAprilaireEntity

AIR_CLEANING_EVENT_MAP = {0: "off", 3: "event_clean", 4: "allergies"}
@@ -25,12 +23,12 @@ FRESH_AIR_MODE_MAP = {0: "off", 1: "automatic"}

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AprilaireConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Aprilaire select devices."""

coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id]
coordinator = config_entry.runtime_data

assert config_entry.unique_id is not None

@@ -13,14 +13,12 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import PERCENTAGE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType

from .const import DOMAIN
from .coordinator import AprilaireCoordinator
from .coordinator import AprilaireConfigEntry, AprilaireCoordinator
from .entity import BaseAprilaireEntity

DEHUMIDIFICATION_STATUS_MAP: dict[StateType, str] = {
@@ -76,12 +74,12 @@ def get_entities(

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AprilaireConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Aprilaire sensor devices."""

coordinator: AprilaireCoordinator = hass.data[DOMAIN][config_entry.unique_id]
coordinator = config_entry.runtime_data

assert config_entry.unique_id is not None

@@ -22,6 +22,9 @@ class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN):

VERSION = 1

host: str
port: int

async def _async_set_unique_id_and_update(
self, host: str, port: int, uuid: str
) -> None:
@@ -74,16 +77,11 @@ class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle user-confirmation of discovered node."""
context = self.context
placeholders = {
"host": context[CONF_HOST],
}
context["title_placeholders"] = placeholders
placeholders = {"host": self.host}
self.context["title_placeholders"] = placeholders

if user_input is not None:
return await self._async_check_and_create(
context[CONF_HOST], context[CONF_PORT]
)
return await self._async_check_and_create(self.host, self.port)

return self.async_show_form(
step_id="confirm", description_placeholders=placeholders
@@ -101,7 +99,6 @@ class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN):

await self._async_set_unique_id_and_update(host, port, uuid)

context = self.context
context[CONF_HOST] = host
context[CONF_PORT] = DEFAULT_PORT
self.host = host
self.port = DEFAULT_PORT
return await self.async_step_confirm()

@@ -2,33 +2,28 @@

from __future__ import annotations

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .coordinator import ArveCoordinator
from .coordinator import ArveConfigEntry, ArveCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool:
"""Set up Arve from a config entry."""

coordinator = ArveCoordinator(hass)

await coordinator.async_config_entry_first_refresh()

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
entry.runtime_data = coordinator

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)

return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -21,11 +21,13 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda

from .const import DOMAIN, LOGGER

type ArveConfigEntry = ConfigEntry[ArveCoordinator]

class ArveCoordinator(DataUpdateCoordinator[ArveSensProData]):
"""Arve coordinator."""

config_entry: ConfigEntry
config_entry: ArveConfigEntry
devices: ArveDevices

def __init__(self, hass: HomeAssistant) -> None:

@@ -11,7 +11,6 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_MILLION,
@@ -21,8 +20,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import ArveCoordinator
from .coordinator import ArveConfigEntry
from .entity import ArveDeviceEntity

@@ -85,10 +83,10 @@ SENSORS: tuple[ArveDeviceEntityDescription, ...] = (

async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
hass: HomeAssistant, entry: ArveConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up Arve device based on a config entry."""
coordinator: ArveCoordinator = hass.data[DOMAIN][entry.entry_id]
coordinator = entry.runtime_data

async_add_entities(
ArveDevice(coordinator, description, sn)

@@ -4,58 +4,43 @@ from __future__ import annotations

import logging

from aioaseko import APIUnavailable, InvalidAuthCredentials, MobileAccount
from aioaseko import Aseko, AsekoNotLoggedIn

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.exceptions import ConfigEntryAuthFailed

from .const import DOMAIN
from .coordinator import AsekoDataUpdateCoordinator
from .coordinator import AsekoConfigEntry, AsekoDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

PLATFORMS: list[str] = [Platform.BINARY_SENSOR, Platform.SENSOR]

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: AsekoConfigEntry) -> bool:
"""Set up Aseko Pool Live from a config entry."""
account = MobileAccount(
async_get_clientsession(hass),
username=entry.data[CONF_EMAIL],
password=entry.data[CONF_PASSWORD],
)
aseko = Aseko(entry.data[CONF_EMAIL], entry.data[CONF_PASSWORD])

try:
units = await account.get_units()
except InvalidAuthCredentials as err:
await aseko.login()
except AsekoNotLoggedIn as err:
raise ConfigEntryAuthFailed from err
except APIUnavailable as err:
raise ConfigEntryNotReady from err

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = []

for unit in units:
coordinator = AsekoDataUpdateCoordinator(hass, unit)
await coordinator.async_config_entry_first_refresh()
hass.data[DOMAIN][entry.entry_id].append((unit, coordinator))

coordinator = AsekoDataUpdateCoordinator(hass, aseko)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True

async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AsekoConfigEntry) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)

return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
async def async_migrate_entry(
hass: HomeAssistant, config_entry: AsekoConfigEntry
) -> bool:
"""Migrate old entry."""
_LOGGER.debug("Migrating from version %s", config_entry.version)

@@ -8,16 +8,13 @@ from dataclasses import dataclass
from aioaseko import Unit

from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN
from .coordinator import AsekoDataUpdateCoordinator
from .coordinator import AsekoConfigEntry
from .entity import AsekoEntity

@@ -25,63 +22,40 @@ from .entity import AsekoEntity
class AsekoBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes an Aseko binary sensor entity."""

value_fn: Callable[[Unit], bool]
value_fn: Callable[[Unit], bool | None]

UNIT_BINARY_SENSORS: tuple[AsekoBinarySensorEntityDescription, ...] = (
BINARY_SENSORS: tuple[AsekoBinarySensorEntityDescription, ...] = (
AsekoBinarySensorEntityDescription(
key="water_flow",
translation_key="water_flow",
value_fn=lambda unit: unit.water_flow,
),
AsekoBinarySensorEntityDescription(
key="has_alarm",
translation_key="alarm",
value_fn=lambda unit: unit.has_alarm,
device_class=BinarySensorDeviceClass.SAFETY,
),
AsekoBinarySensorEntityDescription(
key="has_error",
translation_key="error",
value_fn=lambda unit: unit.has_error,
device_class=BinarySensorDeviceClass.PROBLEM,
translation_key="water_flow_to_probes",
value_fn=lambda unit: unit.water_flow_to_probes,
),
)

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AsekoConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Aseko Pool Live binary sensors."""
data: list[tuple[Unit, AsekoDataUpdateCoordinator]] = hass.data[DOMAIN][
config_entry.entry_id
]
coordinator = config_entry.runtime_data
units = coordinator.data.values()
async_add_entities(
AsekoUnitBinarySensorEntity(unit, coordinator, description)
for unit, coordinator in data
for description in UNIT_BINARY_SENSORS
AsekoBinarySensorEntity(unit, coordinator, description)
for description in BINARY_SENSORS
for unit in units
if description.value_fn(unit) is not None
)

class AsekoUnitBinarySensorEntity(AsekoEntity, BinarySensorEntity):
"""Representation of a unit water flow binary sensor entity."""
class AsekoBinarySensorEntity(AsekoEntity, BinarySensorEntity):
"""Representation of an Aseko binary sensor entity."""

entity_description: AsekoBinarySensorEntityDescription

def __init__(
self,
unit: Unit,
coordinator: AsekoDataUpdateCoordinator,
entity_description: AsekoBinarySensorEntityDescription,
) -> None:
"""Initialize the unit binary sensor."""
super().__init__(unit, coordinator)
self.entity_description = entity_description
self._attr_unique_id = f"{self._unit.serial_number}_{entity_description.key}"

@property
def is_on(self) -> bool:
def is_on(self) -> bool | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self._unit)
return self.entity_description.value_fn(self.unit)

@@ -6,12 +6,16 @@ from collections.abc import Mapping
import logging
from typing import Any

from aioaseko import APIUnavailable, InvalidAuthCredentials, WebAccount
from aioaseko import Aseko, AsekoAPIError, AsekoInvalidCredentials
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import (
SOURCE_REAUTH,
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_UNIQUE_ID
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

@@ -30,19 +34,16 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
}
)

reauth_entry: ConfigEntry | None = None
reauth_entry: ConfigEntry

async def get_account_info(self, email: str, password: str) -> dict:
"""Get account info from the mobile API and the web API."""
session = async_get_clientsession(self.hass)

web_account = WebAccount(session, email, password)
web_account_info = await web_account.login()

aseko = Aseko(email, password)
user = await aseko.login()
return {
CONF_EMAIL: email,
CONF_PASSWORD: password,
CONF_UNIQUE_ID: web_account_info.user_id,
CONF_UNIQUE_ID: user.user_id,
}

async def async_step_user(
@@ -50,7 +51,6 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle the initial step."""

self.reauth_entry = None
errors = {}

if user_input is not None:
@@ -58,9 +58,9 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
info = await self.get_account_info(
user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
)
except APIUnavailable:
except AsekoAPIError:
errors["base"] = "cannot_connect"
except InvalidAuthCredentials:
except AsekoInvalidCredentials:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
@@ -77,8 +77,8 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_store_credentials(self, info: dict[str, Any]) -> ConfigFlowResult:
"""Store validated credentials."""

if self.reauth_entry:
self.hass.config_entries.async_update_entry(
if self.source == SOURCE_REAUTH:
return self.async_update_reload_and_abort(
self.reauth_entry,
title=info[CONF_EMAIL],
data={
@@ -86,8 +86,6 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
CONF_PASSWORD: info[CONF_PASSWORD],
},
)
await self.hass.config_entries.async_reload(self.reauth_entry.entry_id)
return self.async_abort(reason="reauth_successful")

await self.async_set_unique_id(info[CONF_UNIQUE_ID])
self._abort_if_unique_id_configured()
@@ -105,9 +103,7 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""

self.reauth_entry = self.hass.config_entries.async_get_entry(
self.context["entry_id"]
)
self.reauth_entry = self._get_reauth_entry()

return await self.async_step_reauth_confirm()

@@ -122,9 +118,9 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN):
info = await self.get_account_info(
user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
)
except APIUnavailable:
except AsekoAPIError:
errors["base"] = "cannot_connect"
except InvalidAuthCredentials:
except AsekoInvalidCredentials:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")

@@ -5,34 +5,34 @@ from __future__ import annotations
from datetime import timedelta
import logging

from aioaseko import Unit, Variable
from aioaseko import Aseko, Unit

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type AsekoConfigEntry = ConfigEntry[AsekoDataUpdateCoordinator]

class AsekoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Variable]]):
class AsekoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Unit]]):
"""Class to manage fetching Aseko unit data from single endpoint."""

def __init__(self, hass: HomeAssistant, unit: Unit) -> None:
def __init__(self, hass: HomeAssistant, aseko: Aseko) -> None:
"""Initialize global Aseko unit data updater."""
self._unit = unit

if self._unit.name:
name = self._unit.name
else:
name = f"{self._unit.type}-{self._unit.serial_number}"
self._aseko = aseko

super().__init__(
hass,
_LOGGER,
name=name,
name=DOMAIN,
update_interval=timedelta(minutes=2),
)

async def _async_update_data(self) -> dict[str, Variable]:
async def _async_update_data(self) -> dict[str, Unit]:
"""Fetch unit data."""
await self._unit.get_state()
return {variable.type: variable for variable in self._unit.variables}
units = await self._aseko.get_units()
return {unit.serial_number: unit for unit in units}

@@ -3,6 +3,7 @@
from aioaseko import Unit

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
@@ -14,20 +15,44 @@ class AsekoEntity(CoordinatorEntity[AsekoDataUpdateCoordinator]):

_attr_has_entity_name = True

def __init__(self, unit: Unit, coordinator: AsekoDataUpdateCoordinator) -> None:
def __init__(
self,
unit: Unit,
coordinator: AsekoDataUpdateCoordinator,
description: EntityDescription,
) -> None:
"""Initialize the aseko entity."""
super().__init__(coordinator)
self.entity_description = description
self._unit = unit

if self._unit.type == "Remote":
self._device_model = "ASIN Pool"
else:
self._device_model = f"ASIN AQUA {self._unit.type}"
self._device_name = self._unit.name if self._unit.name else self._device_model

self._attr_unique_id = f"{self.unit.serial_number}{self.entity_description.key}"
self._attr_device_info = DeviceInfo(
name=self._device_name,
identifiers={(DOMAIN, str(self._unit.serial_number))},
manufacturer="Aseko",
model=self._device_model,
identifiers={(DOMAIN, self.unit.serial_number)},
serial_number=self.unit.serial_number,
name=unit.name or unit.serial_number,
manufacturer=(
self.unit.brand_name.primary
if self.unit.brand_name is not None
else None
),
model=(
self.unit.brand_name.secondary
if self.unit.brand_name is not None
else None
),
configuration_url=f"https://aseko.cloud/unit/{self.unit.serial_number}",
)

@property
def unit(self) -> Unit:
"""Return the aseko unit."""
return self.coordinator.data[self._unit.serial_number]

@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and self.unit.serial_number in self.coordinator.data
and self.unit.online
)

@@ -1,16 +1,28 @@
{
"entity": {
"binary_sensor": {
"water_flow": {
"water_flow_to_probes": {
"default": "mdi:waves-arrow-right"
}
},
"sensor": {
"air_temperature": {
"default": "mdi:thermometer-lines"
},
"electrolyzer": {
"default": "mdi:lightning-bolt"
},
"free_chlorine": {
"default": "mdi:flask"
"default": "mdi:pool"
},
"redox": {
"default": "mdi:pool"
},
"salinity": {
"default": "mdi:pool"
},
"water_temperature": {
"default": "mdi:coolant-temperature"
"default": "mdi:pool-thermometer"
}
}
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aseko_pool_live",
"iot_class": "cloud_polling",
"loggers": ["aioaseko"],
"requirements": ["aioaseko==0.2.0"]
"requirements": ["aioaseko==1.0.0"]
}

@@ -2,77 +2,109 @@

from __future__ import annotations

from aioaseko import Unit, Variable
from collections.abc import Callable
from dataclasses import dataclass

from aioaseko import Unit

from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfElectricPotential, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType

from .const import DOMAIN
from .coordinator import AsekoDataUpdateCoordinator
from .coordinator import AsekoConfigEntry
from .entity import AsekoEntity


@dataclass(frozen=True, kw_only=True)
class AsekoSensorEntityDescription(SensorEntityDescription):
"""Describes an Aseko sensor entity."""

value_fn: Callable[[Unit], StateType]


SENSORS: list[AsekoSensorEntityDescription] = [
AsekoSensorEntityDescription(
key="airTemp",
translation_key="air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda unit: unit.air_temperature,
),
AsekoSensorEntityDescription(
key="electrolyzer",
translation_key="electrolyzer",
native_unit_of_measurement="g/h",
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda unit: unit.electrolyzer,
),
AsekoSensorEntityDescription(
key="free_chlorine",
translation_key="free_chlorine",
native_unit_of_measurement="mg/l",
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda unit: unit.cl_free,
),
AsekoSensorEntityDescription(
key="ph",
device_class=SensorDeviceClass.PH,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda unit: unit.ph,
),
AsekoSensorEntityDescription(
key="rx",
translation_key="redox",
native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda unit: unit.redox,
),
AsekoSensorEntityDescription(
key="salinity",
translation_key="salinity",
native_unit_of_measurement="kg/m³",
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda unit: unit.salinity,
),
AsekoSensorEntityDescription(
key="waterTemp",
translation_key="water_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda unit: unit.water_temperature,
),
]


async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AsekoConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Aseko Pool Live sensors."""
data: list[tuple[Unit, AsekoDataUpdateCoordinator]] = hass.data[DOMAIN][
config_entry.entry_id
]

coordinator = config_entry.runtime_data
units = coordinator.data.values()
async_add_entities(
VariableSensorEntity(unit, variable, coordinator)
for unit, coordinator in data
for variable in unit.variables
AsekoSensorEntity(unit, coordinator, description)
for description in SENSORS
for unit in units
if description.value_fn(unit) is not None
)


class VariableSensorEntity(AsekoEntity, SensorEntity):
"""Representation of a unit variable sensor entity."""
class AsekoSensorEntity(AsekoEntity, SensorEntity):
"""Representation of an Aseko unit sensor entity."""

_attr_state_class = SensorStateClass.MEASUREMENT

def __init__(
self, unit: Unit, variable: Variable, coordinator: AsekoDataUpdateCoordinator
) -> None:
"""Initialize the variable sensor."""
super().__init__(unit, coordinator)
self._variable = variable

translation_key = {
"Air temp.": "air_temperature",
"Cl free": "free_chlorine",
"Water temp.": "water_temperature",
}.get(self._variable.name)
if translation_key is not None:
self._attr_translation_key = translation_key
else:
self._attr_name = self._variable.name

self._attr_unique_id = f"{self._unit.serial_number}{self._variable.type}"
self._attr_native_unit_of_measurement = self._variable.unit

self._attr_icon = {
"rx": "mdi:test-tube",
"waterLevel": "mdi:waves",
}.get(self._variable.type)

self._attr_device_class = {
"airTemp": SensorDeviceClass.TEMPERATURE,
"waterTemp": SensorDeviceClass.TEMPERATURE,
"ph": SensorDeviceClass.PH,
}.get(self._variable.type)
entity_description: AsekoSensorEntityDescription

@property
def native_value(self) -> int | None:
def native_value(self) -> StateType:
"""Return the state of the sensor."""
variable = self.coordinator.data[self._variable.type]
return variable.current_value
return self.entity_description.value_fn(self.unit)
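The hunk above replaces the old Variable-based sensors with description-driven entities: each AsekoSensorEntityDescription carries a value_fn, and an entity is only created for a unit when that callable returns data. A minimal sketch of the same pattern outside Home Assistant (the FakeUnit stub and its sample values are illustrative, not part of the integration):

from collections.abc import Callable
from dataclasses import dataclass

@dataclass(frozen=True, kw_only=True)
class SensorDescription:
    key: str
    value_fn: Callable[[object], float | None]  # mirrors value_fn in the diff

class FakeUnit:  # illustrative stand-in for aioaseko.Unit
    air_temperature = 21.5
    cl_free = None  # this measurement is absent on the unit

SENSORS = [
    SensorDescription(key="airTemp", value_fn=lambda u: u.air_temperature),
    SensorDescription(key="free_chlorine", value_fn=lambda u: u.cl_free),
]

unit = FakeUnit()
# Only descriptions whose value_fn yields data produce an entity, matching
# the `if description.value_fn(unit) is not None` filter in async_setup_entry.
created = [d.key for d in SENSORS if d.value_fn(unit) is not None]
print(created)  # ['airTemp']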
@@ -26,20 +26,26 @@
},
"entity": {
"binary_sensor": {
"water_flow": {
"name": "Water flow"
},
"alarm": {
"name": "Alarm"
"water_flow_to_probes": {
"name": "Water flow to probes"
}
},
"sensor": {
"air_temperature": {
"name": "Air temperature"
},
"electrolyzer": {
"name": "Electrolyzer"
},
"free_chlorine": {
"name": "Free chlorine"
},
"redox": {
"name": "Redox potential"
},
"salinity": {
"name": "Salinity"
},
"water_temperature": {
"name": "Water temperature"
}
@@ -1,6 +1,7 @@
{
"domain": "assist_pipeline",
"name": "Assist pipeline",
"after_dependencies": ["repairs"],
"codeowners": ["@balloob", "@synesthesiam"],
"dependencies": ["conversation", "stt", "tts", "wake_word"],
"documentation": "https://www.home-assistant.io/integrations/assist_pipeline",

homeassistant/components/assist_pipeline/repair_flows.py (new file, 55 lines)
@@ -0,0 +1,55 @@
"""Repairs implementation for the cloud integration."""

from __future__ import annotations

from typing import cast

import voluptuous as vol

from homeassistant.components.assist_satellite import DOMAIN as ASSIST_SATELLITE_DOMAIN
from homeassistant.components.repairs import RepairsFlow
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers import entity_registry as er

REQUIRED_KEYS = ("entity_id", "entity_uuid", "integration_name")


class AssistInProgressDeprecatedRepairFlow(RepairsFlow):
"""Handler for an issue fixing flow."""

def __init__(self, data: dict[str, str | int | float | None] | None) -> None:
"""Initialize."""
if not data or any(key not in data for key in REQUIRED_KEYS):
raise ValueError("Missing data")
self._data = data

async def async_step_init(self, _: None = None) -> FlowResult:
"""Handle the first step of a fix flow."""
return await self.async_step_confirm_disable_entity()

async def async_step_confirm_disable_entity(
self,
user_input: dict[str, str] | None = None,
) -> FlowResult:
"""Handle the confirm step of a fix flow."""
if user_input is not None:
entity_registry = er.async_get(self.hass)
entity_entry = entity_registry.async_get(
cast(str, self._data["entity_uuid"])
)
if entity_entry:
entity_registry.async_update_entity(
entity_entry.entity_id, disabled_by=er.RegistryEntryDisabler.USER
)
return self.async_create_entry(data={})

description_placeholders: dict[str, str] = {
"assist_satellite_domain": ASSIST_SATELLITE_DOMAIN,
"entity_id": cast(str, self._data["entity_id"]),
"integration_name": cast(str, self._data["integration_name"]),
}
return self.async_show_form(
step_id="confirm_disable_entity",
data_schema=vol.Schema({}),
description_placeholders=description_placeholders,
)
@@ -7,7 +7,7 @@
},
"select": {
"pipeline": {
"name": "Assist pipeline",
"name": "Assistant",
"state": {
"preferred": "Preferred"
}
@@ -21,5 +21,17 @@
}
}
}
},
"issues": {
"assist_in_progress_deprecated": {
"title": "{integration_name} in progress binary sensors are deprecated",
"fix_flow": {
"step": {
"confirm_disable_entity": {
"description": "The {integration_name} in progress binary sensor `{entity_id}` is deprecated.\n\nMigrate your configuration to use the corresponding `{assist_satellite_domain}` entity and then click SUBMIT to disable the in progress binary sensor and fix this issue."
}
}
}
}
}
}
@@ -10,8 +10,15 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN, AssistSatelliteEntityFeature
from .connection_test import ConnectionTestView
from .const import (
CONNECTION_TEST_DATA,
DATA_COMPONENT,
DOMAIN,
AssistSatelliteEntityFeature,
)
from .entity import (
AssistSatelliteAnnouncement,
AssistSatelliteConfiguration,
AssistSatelliteEntity,
AssistSatelliteEntityDescription,
@@ -22,6 +29,7 @@ from .websocket_api import async_register_websocket_api

__all__ = [
"DOMAIN",
"AssistSatelliteAnnouncement",
"AssistSatelliteEntity",
"AssistSatelliteConfiguration",
"AssistSatelliteEntityDescription",
@@ -36,7 +44,7 @@ PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
component = hass.data[DOMAIN] = EntityComponent[AssistSatelliteEntity](
component = hass.data[DATA_COMPONENT] = EntityComponent[AssistSatelliteEntity](
_LOGGER, DOMAIN, hass
)
await component.async_setup(config)
@@ -55,18 +63,18 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"async_internal_announce",
[AssistSatelliteEntityFeature.ANNOUNCE],
)
hass.data[CONNECTION_TEST_DATA] = {}
async_register_websocket_api(hass)
hass.http.register_view(ConnectionTestView())

return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN]
return await component.async_setup_entry(entry)
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN]
return await component.async_unload_entry(entry)
return await hass.data[DATA_COMPONENT].async_unload_entry(entry)
homeassistant/components/assist_satellite/connection_test.mp3 (new binary file, not shown)
homeassistant/components/assist_satellite/connection_test.py (new file, 43 lines)
@@ -0,0 +1,43 @@
"""Assist satellite connection test."""

import logging
from pathlib import Path

from aiohttp import web

from homeassistant.components.http import KEY_HASS, HomeAssistantView

from .const import CONNECTION_TEST_DATA

_LOGGER = logging.getLogger(__name__)

CONNECTION_TEST_CONTENT_TYPE = "audio/mpeg"
CONNECTION_TEST_FILENAME = "connection_test.mp3"
CONNECTION_TEST_URL_BASE = "/api/assist_satellite/connection_test"


class ConnectionTestView(HomeAssistantView):
"""View to serve an audio sample for connection test."""

requires_auth = False
url = f"{CONNECTION_TEST_URL_BASE}/{{connection_id}}"
name = "api:assist_satellite_connection_test"

async def get(self, request: web.Request, connection_id: str) -> web.Response:
"""Start a get request."""
_LOGGER.debug("Request for connection test with id %s", connection_id)

hass = request.app[KEY_HASS]
connection_test_data = hass.data[CONNECTION_TEST_DATA]

connection_test_event = connection_test_data.pop(connection_id, None)

if connection_test_event is None:
return web.Response(status=404)

connection_test_event.set()

audio_path = Path(__file__).parent / CONNECTION_TEST_FILENAME
audio_data = await hass.async_add_executor_job(audio_path.read_bytes)

return web.Response(body=audio_data, content_type=CONNECTION_TEST_CONTENT_TYPE)
@@ -1,9 +1,25 @@
"""Constants for assist satellite."""

from __future__ import annotations

import asyncio
from enum import IntFlag
from typing import TYPE_CHECKING

from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
from homeassistant.helpers.entity_component import EntityComponent

from .entity import AssistSatelliteEntity

DOMAIN = "assist_satellite"

DATA_COMPONENT: HassKey[EntityComponent[AssistSatelliteEntity]] = HassKey(DOMAIN)
CONNECTION_TEST_DATA: HassKey[dict[str, asyncio.Event]] = HassKey(
f"{DOMAIN}_connection_tests"
)


class AssistSatelliteEntityFeature(IntFlag):
"""Supported features of Assist satellite entity."""
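DATA_COMPONENT and CONNECTION_TEST_DATA above use HassKey so that hass.data lookups carry a type. A rough, runnable sketch of the idea with a tiny typed wrapper (conceptual only; the real typing comes from HassKey together with HassDict's overloads, not from this helper):

from typing import Any, Generic, TypeVar, cast

T = TypeVar("T")

class Key(Generic[T]):
    """Simplified stand-in for homeassistant.util.hass_dict.HassKey."""
    def __init__(self, name: str) -> None:
        self.name = name

class TypedStore:
    """Tiny dict wrapper whose reads are typed by the key, mimicking hass.data."""
    def __init__(self) -> None:
        self._data: dict[str, Any] = {}
    def set(self, key: Key[T], value: T) -> None:
        self._data[key.name] = value
    def get(self, key: Key[T]) -> T:
        return cast(T, self._data[key.name])

EVENTS: Key[dict[str, str]] = Key("assist_satellite_connection_tests")
store = TypedStore()
store.set(EVENTS, {})
store.get(EVENTS)["abc123"] = "pending"  # checker infers dict[str, str] here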
@@ -8,7 +8,7 @@ from dataclasses import dataclass
from enum import StrEnum
import logging
import time
from typing import Any, Final, final
from typing import Any, Final, Literal, final

from homeassistant.components import media_source, stt, tts
from homeassistant.components.assist_pipeline import (
@@ -41,10 +41,10 @@ _LOGGER = logging.getLogger(__name__)
class AssistSatelliteState(StrEnum):
"""Valid states of an Assist satellite entity."""

LISTENING_WAKE_WORD = "listening_wake_word"
"""Device is streaming audio for wake word detection to Home Assistant."""
IDLE = "idle"
"""Device is waiting for user input, such as a wake word or a button press."""

LISTENING_COMMAND = "listening_command"
LISTENING = "listening"
"""Device is streaming audio with the voice command to Home Assistant."""

PROCESSING = "processing"
@@ -86,6 +86,19 @@ class AssistSatelliteConfiguration:
"""Maximum number of simultaneous wake words allowed (0 for no limit)."""


@dataclass
class AssistSatelliteAnnouncement:
"""Announcement to be made."""

message: str
"""Message to be spoken."""

media_id: str
"""Media ID to be played."""

media_id_source: Literal["url", "media_id", "tts"]


class AssistSatelliteEntity(entity.Entity):
"""Entity encapsulating the state and functionality of an Assist satellite."""

@@ -104,7 +117,7 @@ class AssistSatelliteEntity(entity.Entity):
_attr_tts_options: dict[str, Any] | None = None
_pipeline_task: asyncio.Task | None = None

__assist_satellite_state = AssistSatelliteState.LISTENING_WAKE_WORD
__assist_satellite_state = AssistSatelliteState.IDLE

@final
@property
@@ -174,10 +187,13 @@ class AssistSatelliteEntity(entity.Entity):
"""
await self._cancel_running_pipeline()

media_id_source: Literal["url", "media_id", "tts"] | None = None

if message is None:
message = ""

if not media_id:
media_id_source = "tts"
# Synthesize audio and get URL
pipeline_id = self._resolve_pipeline()
pipeline = async_get_pipeline(self.hass, pipeline_id)
@@ -198,6 +214,8 @@ class AssistSatelliteEntity(entity.Entity):
)

if media_source.is_media_source_id(media_id):
if not media_id_source:
media_id_source = "media_id"
media = await media_source.async_resolve_media(
self.hass,
media_id,
@@ -205,6 +223,9 @@ class AssistSatelliteEntity(entity.Entity):
)
media_id = media.url

if not media_id_source:
media_id_source = "url"

# Resolve to full URL
media_id = async_process_play_media_url(self.hass, media_id)

@@ -216,12 +237,14 @@ class AssistSatelliteEntity(entity.Entity):

try:
# Block until announcement is finished
await self.async_announce(message, media_id)
await self.async_announce(
AssistSatelliteAnnouncement(message, media_id, media_id_source)
)
finally:
self._is_announcing = False
self._set_state(AssistSatelliteState.LISTENING_WAKE_WORD)
self._set_state(AssistSatelliteState.IDLE)

async def async_announce(self, message: str, media_id: str) -> None:
async def async_announce(self, announcement: AssistSatelliteAnnouncement) -> None:
"""Announce media on the satellite.

Should block until the announcement is done playing.
@@ -340,9 +363,9 @@ class AssistSatelliteEntity(entity.Entity):
def _internal_on_pipeline_event(self, event: PipelineEvent) -> None:
"""Set state based on pipeline stage."""
if event.type is PipelineEventType.WAKE_WORD_START:
self._set_state(AssistSatelliteState.LISTENING_WAKE_WORD)
self._set_state(AssistSatelliteState.IDLE)
elif event.type is PipelineEventType.STT_START:
self._set_state(AssistSatelliteState.LISTENING_COMMAND)
self._set_state(AssistSatelliteState.LISTENING)
elif event.type is PipelineEventType.INTENT_START:
self._set_state(AssistSatelliteState.PROCESSING)
elif event.type is PipelineEventType.INTENT_END:
@@ -356,7 +379,7 @@ class AssistSatelliteEntity(entity.Entity):
self._set_state(AssistSatelliteState.RESPONDING)
elif event.type is PipelineEventType.RUN_END:
if not self._run_has_tts:
self._set_state(AssistSatelliteState.LISTENING_WAKE_WORD)
self._set_state(AssistSatelliteState.IDLE)

self.on_pipeline_event(event)

@@ -369,7 +392,7 @@ class AssistSatelliteEntity(entity.Entity):
@callback
def tts_response_finished(self) -> None:
"""Tell entity that the text-to-speech response has finished playing."""
self._set_state(AssistSatelliteState.LISTENING_WAKE_WORD)
self._set_state(AssistSatelliteState.IDLE)

@callback
def _resolve_pipeline(self) -> str | None:
@@ -2,7 +2,7 @@
"domain": "assist_satellite",
"name": "Assist Satellite",
"codeowners": ["@home-assistant/core", "@synesthesiam"],
"dependencies": ["assist_pipeline", "stt", "tts"],
"dependencies": ["assist_pipeline", "http", "stt", "tts"],
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
"integration_type": "entity",
"quality_scale": "internal"
@@ -4,8 +4,8 @@
"_": {
"name": "Assist satellite",
"state": {
"listening_wake_word": "Wake word",
"listening_command": "Voice command",
"idle": "[%key:common::state::idle%]",
"listening": "Listening",
"responding": "Responding",
"processing": "Processing"
}
@@ -1,5 +1,6 @@
"""Assist satellite Websocket API."""

import asyncio
from dataclasses import asdict, replace
from typing import Any

@@ -10,10 +11,19 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.util import uuid as uuid_util

from .const import DOMAIN
from .connection_test import CONNECTION_TEST_URL_BASE
from .const import (
CONNECTION_TEST_DATA,
DATA_COMPONENT,
DOMAIN,
AssistSatelliteEntityFeature,
)
from .entity import AssistSatelliteEntity

CONNECTION_TEST_TIMEOUT = 30


@callback
def async_register_websocket_api(hass: HomeAssistant) -> None:
@@ -21,9 +31,9 @@ def async_register_websocket_api(hass: HomeAssistant) -> None:
websocket_api.async_register_command(hass, websocket_intercept_wake_word)
websocket_api.async_register_command(hass, websocket_get_configuration)
websocket_api.async_register_command(hass, websocket_set_wake_words)
websocket_api.async_register_command(hass, websocket_test_connection)


@callback
@websocket_api.websocket_command(
{
vol.Required("type"): "assist_satellite/intercept_wake_word",
@@ -38,8 +48,7 @@ async def websocket_intercept_wake_word(
msg: dict[str, Any],
) -> None:
"""Intercept the next wake word from a satellite."""
component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN]
satellite = component.get_entity(msg["entity_id"])
satellite = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])
if satellite is None:
connection.send_error(
msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found"
@@ -77,8 +86,7 @@ def websocket_get_configuration(
msg: dict[str, Any],
) -> None:
"""Get the current satellite configuration."""
component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN]
satellite = component.get_entity(msg["entity_id"])
satellite = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])
if satellite is None:
connection.send_error(
msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found"
@@ -92,7 +100,6 @@ def websocket_get_configuration(
connection.send_result(msg["id"], config_dict)


@callback
@websocket_api.websocket_command(
{
vol.Required("type"): "assist_satellite/set_wake_words",
@@ -108,8 +115,7 @@ async def websocket_set_wake_words(
msg: dict[str, Any],
) -> None:
"""Set the active wake words for the satellite."""
component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN]
satellite = component.get_entity(msg["entity_id"])
satellite = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])
if satellite is None:
connection.send_error(
msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found"
@@ -143,3 +149,57 @@ async def websocket_set_wake_words(
replace(config, active_wake_words=actual_ids)
)
connection.send_result(msg["id"])


@websocket_api.websocket_command(
{
vol.Required("type"): "assist_satellite/test_connection",
vol.Required("entity_id"): cv.entity_domain(DOMAIN),
}
)
@websocket_api.async_response
async def websocket_test_connection(
hass: HomeAssistant,
connection: websocket_api.connection.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Test the connection between the device and Home Assistant.

Send an announcement to the device with a special media id.
"""
component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN]
satellite = component.get_entity(msg["entity_id"])
if satellite is None:
connection.send_error(
msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found"
)
return
if not (satellite.supported_features or 0) & AssistSatelliteEntityFeature.ANNOUNCE:
connection.send_error(
msg["id"],
websocket_api.ERR_NOT_SUPPORTED,
"Entity does not support announce",
)
return

# Announce and wait for event
connection_test_data = hass.data[CONNECTION_TEST_DATA]
connection_id = uuid_util.random_uuid_hex()
connection_test_event = asyncio.Event()
connection_test_data[connection_id] = connection_test_event

hass.async_create_background_task(
satellite.async_internal_announce(
media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}"
),
f"assist_satellite_connection_test_{msg['entity_id']}",
)

try:
async with asyncio.timeout(CONNECTION_TEST_TIMEOUT):
await connection_test_event.wait()
connection.send_result(msg["id"], {"status": "success"})
except TimeoutError:
connection.send_result(msg["id"], {"status": "timeout"})
finally:
connection_test_data.pop(connection_id, None)
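The test_connection command above pairs an asyncio.Event with a random connection id: the satellite is told to announce a URL that points at ConnectionTestView, and when the device fetches that URL the view sets the event. A stripped-down sketch of the same handshake outside Home Assistant (function names and delays are illustrative; requires Python 3.11+ for asyncio.timeout):

import asyncio

pending: dict[str, asyncio.Event] = {}

async def device_fetches_audio(connection_id: str) -> None:
    # Stands in for the satellite requesting the connection-test media URL.
    await asyncio.sleep(0.1)
    if (event := pending.pop(connection_id, None)) is not None:
        event.set()

async def test_connection(connection_id: str, timeout: float = 5.0) -> str:
    event = asyncio.Event()
    pending[connection_id] = event
    asyncio.create_task(device_fetches_audio(connection_id))
    try:
        async with asyncio.timeout(timeout):
            await event.wait()
        return "success"
    except TimeoutError:
        return "timeout"
    finally:
        pending.pop(connection_id, None)

print(asyncio.run(test_connection("abc123")))  # success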
@@ -2,7 +2,7 @@

from __future__ import annotations

from homeassistant.components.device_tracker import ScannerEntity, SourceType
from homeassistant.components.device_tracker import ScannerEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -71,11 +71,6 @@ class AsusWrtDevice(ScannerEntity):
"""Return true if the device is connected to the network."""
return self._device.is_connected

@property
def source_type(self) -> SourceType:
"""Return the source type."""
return SourceType.ROUTER

@property
def hostname(self) -> str | None:
"""Return the hostname of device."""
@@ -1,91 +1,29 @@
"""The ATAG Integration."""

from asyncio import timeout
from datetime import timedelta
import logging

from pyatag import AtagException, AtagOne

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)

_LOGGER = logging.getLogger(__name__)
from .coordinator import AtagConfigEntry, AtagDataUpdateCoordinator

DOMAIN = "atag"
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: AtagConfigEntry) -> bool:
"""Set up Atag integration from a config entry."""

async def _async_update_data():
"""Update data via library."""
async with timeout(20):
try:
await atag.update()
except AtagException as err:
raise UpdateFailed(err) from err
return atag

atag = AtagOne(
session=async_get_clientsession(hass), **entry.data, device=entry.unique_id
)
coordinator = DataUpdateCoordinator[AtagOne](
hass,
_LOGGER,
name=DOMAIN.title(),
update_method=_async_update_data,
update_interval=timedelta(seconds=60),
)

coordinator = AtagDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
entry.runtime_data = coordinator
if entry.unique_id is None:
hass.config_entries.async_update_entry(entry, unique_id=atag.id)
hass.config_entries.async_update_entry(entry, unique_id=coordinator.atag.id)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AtagConfigEntry) -> bool:
"""Unload Atag config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok


class AtagEntity(CoordinatorEntity[DataUpdateCoordinator[AtagOne]]):
"""Defines a base Atag entity."""

def __init__(
self, coordinator: DataUpdateCoordinator[AtagOne], atag_id: str
) -> None:
"""Initialize the Atag entity."""
super().__init__(coordinator)

self._id = atag_id
self._attr_name = DOMAIN.title()
self._attr_unique_id = f"{coordinator.data.id}-{atag_id}"

@property
def device_info(self) -> DeviceInfo:
"""Return info for device registry."""
return DeviceInfo(
identifiers={(DOMAIN, self.coordinator.data.id)},
manufacturer="Atag",
model="Atag One",
name="Atag Thermostat",
sw_version=self.coordinator.data.apiversion,
)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -12,13 +12,13 @@ from homeassistant.components.climate import (
HVACAction,
HVACMode,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, Platform
from homeassistant.const import ATTR_TEMPERATURE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.enum import try_parse_enum

from . import DOMAIN, AtagEntity
from .coordinator import AtagConfigEntry, AtagDataUpdateCoordinator
from .entity import AtagEntity

PRESET_MAP = {
"Manual": "manual",
@@ -32,11 +32,10 @@ HVAC_MODES = [HVACMode.AUTO, HVACMode.HEAT]


async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
hass: HomeAssistant, entry: AtagConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Load a config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
async_add_entities([AtagThermostat(coordinator, Platform.CLIMATE)])
async_add_entities([AtagThermostat(entry.runtime_data, "climate")])


class AtagThermostat(AtagEntity, ClimateEntity):
@@ -49,49 +48,49 @@ class AtagThermostat(AtagEntity, ClimateEntity):
)
_enable_turn_on_off_backwards_compatibility = False

def __init__(self, coordinator, atag_id):
def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None:
"""Initialize an Atag climate device."""
super().__init__(coordinator, atag_id)
self._attr_temperature_unit = coordinator.data.climate.temp_unit
self._attr_temperature_unit = coordinator.atag.climate.temp_unit

@property
def hvac_mode(self) -> HVACMode | None:
"""Return hvac operation ie. heat, cool mode."""
return try_parse_enum(HVACMode, self.coordinator.data.climate.hvac_mode)
return try_parse_enum(HVACMode, self.coordinator.atag.climate.hvac_mode)

@property
def hvac_action(self) -> HVACAction | None:
"""Return the current running hvac operation."""
is_active = self.coordinator.data.climate.status
is_active = self.coordinator.atag.climate.status
return HVACAction.HEATING if is_active else HVACAction.IDLE

@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
return self.coordinator.data.climate.temperature
return self.coordinator.atag.climate.temperature

@property
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
return self.coordinator.data.climate.target_temperature
return self.coordinator.atag.climate.target_temperature

@property
def preset_mode(self) -> str | None:
"""Return the current preset mode, e.g., auto, manual, fireplace, extend, etc."""
preset = self.coordinator.data.climate.preset_mode
preset = self.coordinator.atag.climate.preset_mode
return PRESET_INVERTED.get(preset)

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
await self.coordinator.data.climate.set_temp(kwargs.get(ATTR_TEMPERATURE))
await self.coordinator.atag.climate.set_temp(kwargs.get(ATTR_TEMPERATURE))
self.async_write_ha_state()

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
await self.coordinator.data.climate.set_hvac_mode(hvac_mode)
await self.coordinator.atag.climate.set_hvac_mode(hvac_mode)
self.async_write_ha_state()

async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
await self.coordinator.data.climate.set_preset_mode(PRESET_MAP[preset_mode])
await self.coordinator.atag.climate.set_preset_mode(PRESET_MAP[preset_mode])
self.async_write_ha_state()
homeassistant/components/atag/coordinator.py (new file, 41 lines)
@@ -0,0 +1,41 @@
"""The ATAG Integration."""

from asyncio import timeout
from datetime import timedelta
import logging

from pyatag import AtagException, AtagOne

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)

type AtagConfigEntry = ConfigEntry[AtagDataUpdateCoordinator]


class AtagDataUpdateCoordinator(DataUpdateCoordinator[None]):
"""Atag data update coordinator."""

def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Initialize Atag coordinator."""
super().__init__(
hass,
_LOGGER,
name="Atag",
update_interval=timedelta(seconds=60),
)

self.atag = AtagOne(
session=async_get_clientsession(hass), **entry.data, device=entry.unique_id
)

async def _async_update_data(self) -> None:
"""Update data via library."""
async with timeout(20):
try:
await self.atag.update()
except AtagException as err:
raise UpdateFailed(err) from err
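The new coordinator module also illustrates the entry.runtime_data pattern used throughout this diff: a type alias narrows ConfigEntry to the coordinator it carries, so platforms read config_entry.runtime_data instead of digging through hass.data. A minimal sketch of that shape with a dummy coordinator (illustrative only, not the Home Assistant classes):

from dataclasses import dataclass, field
from typing import Generic, TypeVar

T = TypeVar("T")

@dataclass
class ConfigEntry(Generic[T]):
    """Simplified stand-in for homeassistant.config_entries.ConfigEntry."""
    entry_id: str
    runtime_data: T = field(default=None)  # type: ignore[assignment]

class DummyCoordinator:
    data = {"outside_temp": 7.5}

# mirrors: type AtagConfigEntry = ConfigEntry[AtagDataUpdateCoordinator]
DummyConfigEntry = ConfigEntry[DummyCoordinator]

entry: DummyConfigEntry = ConfigEntry(entry_id="abc")
entry.runtime_data = DummyCoordinator()          # set once in async_setup_entry
print(entry.runtime_data.data["outside_temp"])   # read later in a platform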
homeassistant/components/atag/entity.py (new file, 30 lines)
@@ -0,0 +1,30 @@
"""The ATAG Integration."""

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import DOMAIN
from .coordinator import AtagDataUpdateCoordinator


class AtagEntity(CoordinatorEntity[AtagDataUpdateCoordinator]):
"""Defines a base Atag entity."""

def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None:
"""Initialize the Atag entity."""
super().__init__(coordinator)

self._id = atag_id
self._attr_name = DOMAIN.title()
self._attr_unique_id = f"{coordinator.atag.id}-{atag_id}"

@property
def device_info(self) -> DeviceInfo:
"""Return info for device registry."""
return DeviceInfo(
identifiers={(DOMAIN, self.coordinator.atag.id)},
manufacturer="Atag",
model="Atag One",
name="Atag Thermostat",
sw_version=self.coordinator.atag.apiversion,
)
@@ -1,7 +1,6 @@
"""Initialization of ATAG One sensor platform."""

from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
PERCENTAGE,
UnitOfPressure,
@@ -11,7 +10,8 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import DOMAIN, AtagEntity
from .coordinator import AtagConfigEntry, AtagDataUpdateCoordinator
from .entity import AtagEntity

SENSORS = {
"Outside Temperature": "outside_temp",
@@ -27,43 +27,43 @@ SENSORS = {

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AtagConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Initialize sensor platform from config entry."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
coordinator = config_entry.runtime_data
async_add_entities([AtagSensor(coordinator, sensor) for sensor in SENSORS])


class AtagSensor(AtagEntity, SensorEntity):
"""Representation of a AtagOne Sensor."""

def __init__(self, coordinator, sensor):
def __init__(self, coordinator: AtagDataUpdateCoordinator, sensor: str) -> None:
"""Initialize Atag sensor."""
super().__init__(coordinator, SENSORS[sensor])
self._attr_name = sensor
if coordinator.data.report[self._id].sensorclass in (
if coordinator.atag.report[self._id].sensorclass in (
SensorDeviceClass.PRESSURE,
SensorDeviceClass.TEMPERATURE,
):
self._attr_device_class = coordinator.data.report[self._id].sensorclass
if coordinator.data.report[self._id].measure in (
self._attr_device_class = coordinator.atag.report[self._id].sensorclass
if coordinator.atag.report[self._id].measure in (
UnitOfPressure.BAR,
UnitOfTemperature.CELSIUS,
UnitOfTemperature.FAHRENHEIT,
PERCENTAGE,
UnitOfTime.HOURS,
):
self._attr_native_unit_of_measurement = coordinator.data.report[
self._attr_native_unit_of_measurement = coordinator.atag.report[
self._id
].measure

@property
def native_value(self):
"""Return the state of the sensor."""
return self.coordinator.data.report[self._id].state
return self.coordinator.atag.report[self._id].state

@property
def icon(self):
"""Return icon."""
return self.coordinator.data.report[self._id].icon
return self.coordinator.atag.report[self._id].icon
@@ -7,24 +7,25 @@ from homeassistant.components.water_heater import (
STATE_PERFORMANCE,
WaterHeaterEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_TEMPERATURE, STATE_OFF, Platform, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import DOMAIN, AtagEntity
from .coordinator import AtagConfigEntry
from .entity import AtagEntity

OPERATION_LIST = [STATE_OFF, STATE_ECO, STATE_PERFORMANCE]


async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AtagConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Initialize DHW device from config entry."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities([AtagWaterHeater(coordinator, Platform.WATER_HEATER)])
async_add_entities(
[AtagWaterHeater(config_entry.runtime_data, Platform.WATER_HEATER)]
)


class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
@@ -36,30 +37,30 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
@property
def current_temperature(self):
"""Return the current temperature."""
return self.coordinator.data.dhw.temperature
return self.coordinator.atag.dhw.temperature

@property
def current_operation(self):
"""Return current operation."""
operation = self.coordinator.data.dhw.current_operation
operation = self.coordinator.atag.dhw.current_operation
return operation if operation in self.operation_list else STATE_OFF

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
if await self.coordinator.data.dhw.set_temp(kwargs.get(ATTR_TEMPERATURE)):
if await self.coordinator.atag.dhw.set_temp(kwargs.get(ATTR_TEMPERATURE)):
self.async_write_ha_state()

@property
def target_temperature(self):
"""Return the setpoint if water demand, otherwise return base temp (comfort level)."""
return self.coordinator.data.dhw.target_temperature
return self.coordinator.atag.dhw.target_temperature

@property
def max_temp(self) -> float:
"""Return the maximum temperature."""
return self.coordinator.data.dhw.max_temp
return self.coordinator.atag.dhw.max_temp

@property
def min_temp(self) -> float:
"""Return the minimum temperature."""
return self.coordinator.data.dhw.min_temp
return self.coordinator.atag.dhw.min_temp
@@ -12,7 +12,7 @@ from yalexs.authenticator_common import ValidationResult
from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND, Brand
from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback

@@ -93,7 +93,6 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
self._aiohttp_session: aiohttp.ClientSession | None = None
self._user_auth_details: dict[str, Any] = {}
self._needs_reset = True
self._mode: str | None = None
super().__init__()

async def async_step_user(
@@ -147,7 +146,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle validation (2fa) step."""
if user_input:
if self._mode == "reauth":
if self.source == SOURCE_REAUTH:
return await self.async_step_reauth_validate(user_input)
return await self.async_step_user_validate(user_input)

@@ -188,8 +187,6 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle configuration by re-auth."""
self._user_auth_details = dict(entry_data)
self._mode = "reauth"
self._needs_reset = True
return await self.async_step_reauth_validate()

async def async_step_reauth_validate(
@@ -16,6 +16,10 @@
"hostname": "connect",
"macaddress": "2C9FFB*"
},
{
"hostname": "connect",
"macaddress": "789C85*"
},
{
"hostname": "august*",
"macaddress": "E076D0*"
@@ -24,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==8.6.4", "yalexs-ble==2.4.3"]
"requirements": ["yalexs==8.9.0", "yalexs-ble==2.4.3"]
}
@@ -10,21 +10,15 @@
# and add the following to the end of script/bootstrap:
# sudo chmod 777 /dev/ttyUSB0

import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS, CONF_PORT, Platform
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .coordinator import AuroraAbbDataUpdateCoordinator
from .coordinator import AuroraAbbConfigEntry, AuroraAbbDataUpdateCoordinator

PLATFORMS = [Platform.SENSOR]

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: AuroraAbbConfigEntry) -> bool:
"""Set up Aurora ABB PowerOne from a config entry."""

comport = entry.data[CONF_PORT]
@@ -32,19 +26,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
coordinator = AuroraAbbDataUpdateCoordinator(hass, comport, address)
await coordinator.async_config_entry_first_refresh()

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AuroraAbbConfigEntry) -> bool:
"""Unload a config entry."""

unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
# It should not be necessary to close the serial port because we close
# it after every use in sensor.py, i.e. no need to do entry["client"].close()
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)

return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -6,6 +6,7 @@ from time import sleep
from aurorapy.client import AuroraError, AuroraSerialClient, AuroraTimeoutError
from serial import SerialException

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -14,6 +15,9 @@ from .const import DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)


type AuroraAbbConfigEntry = ConfigEntry[AuroraAbbDataUpdateCoordinator]


class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]):
"""Class to manage fetching AuroraAbbPowerone data."""

@@ -14,7 +14,6 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_SERIAL_NUMBER,
EntityCategory,
@@ -31,7 +30,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import AuroraAbbDataUpdateCoordinator
from .const import (
ATTR_DEVICE_NAME,
ATTR_FIRMWARE,
@@ -40,6 +38,7 @@ from .const import (
DOMAIN,
MANUFACTURER,
)
from .coordinator import AuroraAbbConfigEntry, AuroraAbbDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
ALARM_STATES = list(AuroraMapping.ALARM_STATES.values())
@@ -130,12 +129,12 @@ SENSOR_TYPES = [

async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AuroraAbbConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up aurora_abb_powerone sensor based on a config entry."""

coordinator = hass.data[DOMAIN][config_entry.entry_id]
coordinator = config_entry.runtime_data
data = config_entry.data

entities = [AuroraSensor(coordinator, data, sens) for sens in SENSOR_TYPES]
Some files were not shown because too many files have changed in this diff.