mirror of https://github.com/home-assistant/core.git
synced 2026-01-14 03:27:32 +01:00

Compare commits
1 commit
calendar_t ... sensor_gro

| Author | SHA1 | Date |
|---|---|---|
|  | 7dc5b2e859 |  |

4 .github/copilot-instructions.md vendored
@@ -847,8 +847,8 @@ rules:

## Development Commands

### Code Quality & Linting

- **Run all linters on all files**: `prek run --all-files`
- **Run linters on staged files only**: `prek run`
- **Run all linters on all files**: `pre-commit run --all-files`
- **Run linters on staged files only**: `pre-commit run`
- **PyLint on everything** (slow): `pylint homeassistant`
- **PyLint on specific folder**: `pylint homeassistant/components/my_integration`
- **MyPy type checking (whole project)**: `mypy homeassistant/`
179 .github/workflows/ci.yaml vendored
@@ -59,6 +59,7 @@ env:
  # 15 is the latest version
  # - 15.2 is the latest (as of 9 Feb 2023)
  POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
  PRE_COMMIT_CACHE: ~/.cache/pre-commit
  UV_CACHE_DIR: /tmp/uv-cache
  APT_CACHE_BASE: /home/runner/work/apt
  APT_CACHE_DIR: /home/runner/work/apt/cache

@@ -82,6 +83,7 @@ jobs:
      integrations_glob: ${{ steps.info.outputs.integrations_glob }}
      integrations: ${{ steps.integrations.outputs.changes }}
      apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }}
      pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
      python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
      requirements: ${{ steps.core.outputs.requirements }}
      mariadb_groups: ${{ steps.info.outputs.mariadb_groups }}

@@ -109,6 +111,11 @@ jobs:
          hashFiles('requirements_all.txt') }}-${{
          hashFiles('homeassistant/package_constraints.txt') }}-${{
          hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT
      - name: Generate partial pre-commit restore key
        id: generate_pre-commit_cache_key
        run: >-
          echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
          hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
      - name: Generate partial apt restore key
        id: generate_apt_cache_key
        run: |

@@ -237,8 +244,8 @@ jobs:
          echo "skip_coverage: ${skip_coverage}"
          echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT

  prek:
    name: Run prek checks
  pre-commit:
    name: Prepare pre-commit base
    runs-on: *runs-on-ubuntu
    needs: [info]
    if: |

@@ -247,23 +254,147 @@ jobs:
      && github.event.inputs.audit-licenses-only != 'true'
    steps:
      - *checkout
      - &setup-python-default
        name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
        uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        with:
          path: venv
          key: &key-pre-commit-venv >-
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          python -m venv venv
          . venv/bin/activate
          python --version
          pip install "$(grep '^uv' < requirements.txt)"
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: *actions-cache
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
          key: &key-pre-commit-env >-
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Install pre-commit dependencies
        if: steps.cache-precommit.outputs.cache-hit != 'true'
        run: |
          . venv/bin/activate
          pre-commit install-hooks

  lint-ruff-format:
    name: Check ruff-format
    runs-on: *runs-on-ubuntu
    needs: &needs-pre-commit
      - info
      - pre-commit
    steps:
      - *checkout
      - *setup-python-default
      - &cache-restore-pre-commit-venv
        name: Restore base Python virtual environment
        id: cache-venv
        uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        with:
          path: venv
          fail-on-cache-miss: true
          key: *key-pre-commit-venv
      - &cache-restore-pre-commit-env
        name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: *actions-cache-restore
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
          key: *key-pre-commit-env
      - name: Run ruff-format
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github

  lint-ruff:
    name: Check ruff
    runs-on: *runs-on-ubuntu
    needs: *needs-pre-commit
    steps:
      - *checkout
      - *setup-python-default
      - *cache-restore-pre-commit-venv
      - *cache-restore-pre-commit-env
      - name: Run ruff
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github

  lint-other:
    name: Check other linters
    runs-on: *runs-on-ubuntu
    needs: *needs-pre-commit
    steps:
      - *checkout
      - *setup-python-default
      - *cache-restore-pre-commit-venv
      - *cache-restore-pre-commit-env

      - name: Register yamllint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/yamllint.json"
      - name: Run yamllint
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual yamllint --all-files --show-diff-on-failure

      - name: Register check-json problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-json.json"
      - name: Run check-json
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-json --all-files --show-diff-on-failure

      - name: Run prettier (fully)
        if: needs.info.outputs.test_full_suite == 'true'
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual prettier --all-files --show-diff-on-failure

      - name: Run prettier (partially)
        if: needs.info.outputs.test_full_suite == 'false'
        shell: bash
        run: |
          . venv/bin/activate
          shopt -s globstar
          pre-commit run --hook-stage manual prettier --show-diff-on-failure --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}

      - name: Register check executables problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
      - name: Run executables check
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files --show-diff-on-failure

      - name: Register codespell problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/codespell.json"
      - name: Run prek
        uses: j178/prek-action@91fd7d7cf70ae1dee9f4f44e7dfa5d1073fe6623 # v1.0.11
        env:
          PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
          RUFF_OUTPUT_FORMAT: github
      - name: Run codespell
        run: |
          . venv/bin/activate
          pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files

  lint-hadolint:
    name: Check ${{ matrix.file }}

@@ -303,7 +434,7 @@ jobs:
      - &setup-python-matrix
        name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        uses: *actions-setup-python
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true

@@ -316,7 +447,7 @@ jobs:
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
        uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        uses: *actions-cache
        with:
          path: venv
          key: &key-python-venv >-

@@ -431,7 +562,7 @@ jobs:
    steps:
      - &cache-restore-apt
        name: Restore apt cache
        uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        uses: *actions-cache-restore
        with:
          path: *path-apt-cache
          fail-on-cache-miss: true

@@ -448,13 +579,7 @@ jobs:
            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            libturbojpeg
      - *checkout
      - &setup-python-default
        name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: *actions-setup-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - *setup-python-default
      - &cache-restore-python-default
        name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv

@@ -657,7 +782,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
    steps:
      - *cache-restore-apt

@@ -696,7 +823,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
      - prepare-pytest-full
    if: |

@@ -820,7 +949,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
    if: |
      needs.info.outputs.lint_only != 'true'

@@ -935,7 +1066,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
    if: |
      needs.info.outputs.lint_only != 'true'

@@ -1069,7 +1202,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
    if: |
      needs.info.outputs.lint_only != 'true'
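The partial restore keys above work by hashing every input that should invalidate the cache and folding the digest into the key. A minimal standalone sketch of that idea (not part of the commit; the inlined content and 16-character truncation are illustrative, and GitHub's real hashFiles() hashes actual files):

```python
# Sketch of a content-hash cache key: any change to an input yields a new key.
import hashlib

def cache_key(version: str, *contents: bytes) -> str:
    """Fold all inputs into one digest, mimicking the partial restore key."""
    digest = hashlib.sha256()
    for blob in contents:
        digest.update(blob)
    return f"pre-commit-{version}-{digest.hexdigest()[:16]}"

config = b"repos: [...]"  # stands in for the bytes of .pre-commit-config.yaml
print(cache_key("1", config))  # changes whenever the config file changes
```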
4 .github/workflows/codeql.yml vendored
@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

      - name: Initialize CodeQL
        uses: github/codeql-action/init@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
        uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          languages: python

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
        uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          category: "/language:python"

@@ -46,7 +46,7 @@ repos:
      # Run `python-typing-update` hook manually from time to time
      # to update python typing syntax.
      # Will require manual work, before submitting changes!
      # prek run --hook-stage manual python-typing-update --all-files
      # pre-commit run --hook-stage manual python-typing-update --all-files
      - id: python-typing-update
        stages: [manual]
        args:
6 .vscode/tasks.json vendored
@@ -45,7 +45,7 @@
    {
      "label": "Ruff",
      "type": "shell",
      "command": "prek run ruff-check --all-files",
      "command": "pre-commit run ruff-check --all-files",
      "group": {
        "kind": "test",
        "isDefault": true

@@ -57,9 +57,9 @@
      "problemMatcher": []
    },
    {
      "label": "Prek",
      "label": "Pre-commit",
      "type": "shell",
      "command": "prek run --show-diff-on-failure",
      "command": "pre-commit run --show-diff-on-failure",
      "group": {
        "kind": "test",
        "isDefault": true
2 CODEOWNERS generated
@@ -1068,8 +1068,6 @@ build.json @home-assistant/supervisor
/tests/components/myuplink/ @pajzo @astrandb
/homeassistant/components/nam/ @bieniu
/tests/components/nam/ @bieniu
/homeassistant/components/namecheapdns/ @tr4nt0r
/tests/components/namecheapdns/ @tr4nt0r
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nasweb/ @nasWebio
@@ -3,8 +3,9 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
import logging
import math

from pymicro_vad import MicroVad
from pysilero_vad import SileroVoiceActivityDetector
from pyspeex_noise import AudioProcessor

from .const import BYTES_PER_CHUNK

@@ -42,8 +43,8 @@ class AudioEnhancer(ABC):
        """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""


class MicroVadSpeexEnhancer(AudioEnhancer):
    """Audio enhancer that runs microVAD and speex."""
class SileroVadSpeexEnhancer(AudioEnhancer):
    """Audio enhancer that runs Silero VAD and speex."""

    def __init__(
        self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool

@@ -69,21 +70,49 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
            self.noise_suppression,
        )

        self.vad: MicroVad | None = None
        self.vad: SileroVoiceActivityDetector | None = None

        # We get 10ms chunks but Silero works on 32ms chunks, so we have to
        # buffer audio. The previous speech probability is used until enough
        # audio has been buffered.
        self._vad_buffer: bytearray | None = None
        self._vad_buffer_chunks = 0
        self._vad_buffer_chunk_idx = 0
        self._last_speech_probability: float | None = None

        if self.is_vad_enabled:
            self.vad = MicroVad()
            _LOGGER.debug("Initialized microVAD")
            self.vad = SileroVoiceActivityDetector()

            # VAD buffer is a multiple of 10ms, but Silero VAD needs 32ms.
            self._vad_buffer_chunks = int(
                math.ceil(self.vad.chunk_bytes() / BYTES_PER_CHUNK)
            )
            self._vad_leftover_bytes = self.vad.chunk_bytes() - BYTES_PER_CHUNK
            self._vad_buffer = bytearray(self.vad.chunk_bytes())
            _LOGGER.debug("Initialized Silero VAD")

    def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
        """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
        speech_probability: float | None = None

        assert len(audio) == BYTES_PER_CHUNK

        if self.vad is not None:
            # Run VAD
            speech_probability = self.vad.Process10ms(audio)
            assert self._vad_buffer is not None
            start_idx = self._vad_buffer_chunk_idx * BYTES_PER_CHUNK
            self._vad_buffer[start_idx : start_idx + BYTES_PER_CHUNK] = audio

            self._vad_buffer_chunk_idx += 1
            if self._vad_buffer_chunk_idx >= self._vad_buffer_chunks:
                # We have enough data to run Silero VAD (32 ms)
                self._last_speech_probability = self.vad.process_chunk(
                    self._vad_buffer[: self.vad.chunk_bytes()]
                )

                # Copy leftover audio that wasn't processed to start
                self._vad_buffer[: self._vad_leftover_bytes] = self._vad_buffer[
                    -self._vad_leftover_bytes :
                ]
                self._vad_buffer_chunk_idx = 0

        if self.audio_processor is not None:
            # Run noise suppression and auto gain

@@ -92,5 +121,5 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
        return EnhancedAudioChunk(
            audio=audio,
            timestamp_ms=timestamp_ms,
            speech_probability=speech_probability,
            speech_probability=self._last_speech_probability,
        )
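The new Silero path above buffers 10 ms input chunks until a full 32 ms analysis window is available, reusing the previous speech probability in the meantime. A standalone sketch of that buffering, assuming the 16 kHz / 16-bit mono format from the docstrings (so a 10 ms chunk is 320 bytes and a 32 ms window is 1024 bytes); fake_vad() is a stand-in for the real detector:

```python
# Sketch of buffering 10 ms chunks into 32 ms VAD windows (assumed format:
# 16 kHz sample rate, 16-bit mono samples).
BYTES_PER_CHUNK = 16_000 * 2 // 100      # 10 ms of audio -> 320 bytes
WINDOW_BYTES = int(16_000 * 2 * 0.032)   # 32 ms of audio -> 1024 bytes

buffer = bytearray()
last_probability: float | None = None

def fake_vad(window: bytes) -> float:
    """Stand-in for a real speech detector; returns a value in [0, 1]."""
    return sum(window) / (len(window) * 255)

def feed(chunk: bytes) -> float | None:
    """Buffer one 10 ms chunk; emit a fresh probability once 32 ms is ready."""
    global last_probability
    assert len(chunk) == BYTES_PER_CHUNK
    buffer.extend(chunk)
    if len(buffer) >= WINDOW_BYTES:
        last_probability = fake_vad(bytes(buffer[:WINDOW_BYTES]))
        del buffer[:WINDOW_BYTES]  # keep any unprocessed remainder
    return last_probability  # previous value until enough audio arrives
```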
@@ -8,5 +8,5 @@
  "integration_type": "system",
  "iot_class": "local_push",
  "quality_scale": "internal",
  "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"]
  "requirements": ["pysilero-vad==3.2.0", "pyspeex-noise==1.0.2"]
}
@@ -55,7 +55,7 @@ from homeassistant.util import (
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.limited_size_dict import LimitedSizeDict

from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, SileroVadSpeexEnhancer
from .const import (
    ACKNOWLEDGE_PATH,
    BYTES_PER_CHUNK,

@@ -633,7 +633,7 @@ class PipelineRun:
        # Initialize with audio settings
        if self.audio_settings.needs_processor and (self.audio_enhancer is None):
            # Default audio enhancer
            self.audio_enhancer = MicroVadSpeexEnhancer(
            self.audio_enhancer = SileroVadSpeexEnhancer(
                self.audio_settings.auto_gain_dbfs,
                self.audio_settings.noise_suppression_level,
                self.audio_settings.is_vad_enabled,
@@ -1,6 +1,5 @@
"""The BSB-Lan integration."""

import asyncio
import dataclasses

from bsblan import (

@@ -78,16 +77,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
    bsblan = BSBLAN(config, session)

    try:
        # Initialize the client first - this sets up internal caches and validates
        # the connection by fetching firmware version
        # Initialize the client first - this sets up internal caches and validates the connection
        await bsblan.initialize()

        # Fetch device metadata in parallel for faster startup
        device, info, static = await asyncio.gather(
            bsblan.device(),
            bsblan.info(),
            bsblan.static_values(),
        )
        # Fetch all required device metadata
        device = await bsblan.device()
        info = await bsblan.info()
        static = await bsblan.static_values()
    except BSBLANConnectionError as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,

@@ -115,10 +110,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
    fast_coordinator = BSBLanFastCoordinator(hass, entry, bsblan)
    slow_coordinator = BSBLanSlowCoordinator(hass, entry, bsblan)

    # Perform first refresh of fast coordinator (required for entities)
    # Perform first refresh of both coordinators
    await fast_coordinator.async_config_entry_first_refresh()

    # Refresh slow coordinator - don't fail if DHW is not available
    # Try to refresh slow coordinator, but don't fail if DHW is not available
    # This allows the integration to work even if the device doesn't support DHW
    await slow_coordinator.async_refresh()
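The removed asyncio.gather call and its sequential replacement trade startup latency for simplicity. A minimal sketch of the difference (illustrative coroutines, not the bsblan API): gather starts all three requests concurrently, so total time is roughly the slowest call rather than the sum:

```python
import asyncio

async def fetch(name: str, delay: float) -> str:
    await asyncio.sleep(delay)  # stands in for one HTTP call to the device
    return name

async def main() -> None:
    # Concurrent: total time ~= max(delays)
    device, info, static = await asyncio.gather(
        fetch("device", 0.2), fetch("info", 0.2), fetch("static", 0.2)
    )
    # Sequential: total time ~= sum(delays)
    device = await fetch("device", 0.2)
    info = await fetch("info", 0.2)
    static = await fetch("static", 0.2)

asyncio.run(main())
```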
@@ -2,6 +2,7 @@

from dataclasses import dataclass
from datetime import timedelta
from random import randint

from bsblan import (
    BSBLAN,

@@ -22,17 +23,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda

from .const import DOMAIN, LOGGER, SCAN_INTERVAL_FAST, SCAN_INTERVAL_SLOW

# Filter lists for optimized API calls - only fetch parameters we actually use
# This significantly reduces response time (~0.2s per parameter saved)
STATE_INCLUDE = ["current_temperature", "target_temperature", "hvac_mode"]
SENSOR_INCLUDE = ["current_temperature", "outside_temperature"]
DHW_STATE_INCLUDE = [
    "operating_mode",
    "nominal_setpoint",
    "dhw_actual_value_top_temperature",
]
DHW_CONFIG_INCLUDE = ["reduced_setpoint", "nominal_setpoint_max"]


@dataclass
class BSBLanFastData:

@@ -90,18 +80,26 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
            config_entry,
            client,
            name=f"{DOMAIN}_fast_{config_entry.data[CONF_HOST]}",
            update_interval=SCAN_INTERVAL_FAST,
            update_interval=self._get_update_interval(),
        )

    def _get_update_interval(self) -> timedelta:
        """Get the update interval with a random offset.

        Add a random number of seconds to avoid timeouts when
        the BSB-Lan device is already/still busy retrieving data,
        e.g. for MQTT or internal logging.
        """
        return SCAN_INTERVAL_FAST + timedelta(seconds=randint(1, 8))

    async def _async_update_data(self) -> BSBLanFastData:
        """Fetch fast-changing data from the BSB-Lan device."""
        try:
            # Client is already initialized in async_setup_entry
            # Use include filtering to only fetch parameters we actually use
            # This reduces response time significantly (~0.2s per parameter)
            state = await self.client.state(include=STATE_INCLUDE)
            sensor = await self.client.sensor(include=SENSOR_INCLUDE)
            dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
            # Fetch fast-changing data (state, sensor, DHW state)
            state = await self.client.state()
            sensor = await self.client.sensor()
            dhw = await self.client.hot_water_state()
        except BSBLANAuthError as err:
            raise ConfigEntryAuthFailed(

@@ -113,6 +111,9 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
                f"Error while establishing connection with BSB-Lan device at {host}"
            ) from err

        # Update the interval with random jitter for next update
        self.update_interval = self._get_update_interval()

        return BSBLanFastData(
            state=state,
            sensor=sensor,

@@ -142,8 +143,8 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
        """Fetch slow-changing data from the BSB-Lan device."""
        try:
            # Client is already initialized in async_setup_entry
            # Use include filtering to only fetch parameters we actually use
            dhw_config = await self.client.hot_water_config(include=DHW_CONFIG_INCLUDE)
            # Fetch slow-changing configuration data
            dhw_config = await self.client.hot_water_config()
            dhw_schedule = await self.client.hot_water_schedule()
        except AttributeError:
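_get_update_interval above adds 1-8 seconds of jitter so Home Assistant's polls don't line up with moments when the BSB-Lan device is busy with MQTT or internal logging. A self-contained sketch of the same idea (SCAN_INTERVAL_FAST is assumed here; the real constant lives in the integration's const module):

```python
from datetime import timedelta
from random import randint

SCAN_INTERVAL_FAST = timedelta(seconds=30)  # assumed base interval

def next_interval() -> timedelta:
    """Base interval plus 1-8 s of jitter, recomputed after every refresh,
    so successive polls drift instead of hitting the device on a fixed beat."""
    return SCAN_INTERVAL_FAST + timedelta(seconds=randint(1, 8))
```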
@@ -15,13 +15,5 @@
    "get_events": {
      "service": "mdi:calendar-month"
    }
  },
  "triggers": {
    "event_ended": {
      "trigger": "mdi:calendar-end"
    },
    "event_started": {
      "trigger": "mdi:calendar-start"
    }
  }
}
@@ -45,14 +45,6 @@
      "title": "Detected use of deprecated action calendar.list_events"
    }
  },
  "selector": {
    "trigger_offset_type": {
      "options": {
        "after": "After",
        "before": "Before"
      }
    }
  },
  "services": {
    "create_event": {
      "description": "Adds a new calendar event.",

@@ -111,35 +103,5 @@
      "name": "Get events"
    }
  },
  "title": "Calendar",
  "triggers": {
    "event_ended": {
      "description": "Triggers when a calendar event ends.",
      "fields": {
        "offset": {
          "description": "Offset from the end of the event.",
          "name": "Offset"
        },
        "offset_type": {
          "description": "Whether to trigger before or after the end of the event, if an offset is defined.",
          "name": "Offset type"
        }
      },
      "name": "Calendar event ended"
    },
    "event_started": {
      "description": "Triggers when a calendar event starts.",
      "fields": {
        "offset": {
          "description": "Offset from the start of the event.",
          "name": "Offset"
        },
        "offset_type": {
          "description": "Whether to trigger before or after the start of the event, if an offset is defined.",
          "name": "Offset type"
        }
      },
      "name": "Calendar event started"
    }
  }
  "title": "Calendar"
}
@@ -2,7 +2,6 @@

from __future__ import annotations

import asyncio
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import datetime

@@ -11,15 +10,8 @@ from typing import TYPE_CHECKING, Any, cast

import voluptuous as vol

from homeassistant.const import (
    ATTR_ENTITY_ID,
    CONF_ENTITY_ID,
    CONF_EVENT,
    CONF_OFFSET,
    CONF_OPTIONS,
    CONF_TARGET,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_OPTIONS
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options

@@ -28,13 +20,12 @@ from homeassistant.helpers.event import (
    async_track_point_in_time,
    async_track_time_interval,
)
from homeassistant.helpers.target import TargetEntityChangeTracker, TargetSelection
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util

from . import CalendarEntity, CalendarEvent
from .const import DATA_COMPONENT, DOMAIN
from .const import DATA_COMPONENT

_LOGGER = logging.getLogger(__name__)

@@ -42,35 +33,19 @@ EVENT_START = "start"
EVENT_END = "end"
UPDATE_INTERVAL = datetime.timedelta(minutes=15)

CONF_OFFSET_TYPE = "offset_type"
OFFSET_TYPE_BEFORE = "before"
OFFSET_TYPE_AFTER = "after"


_SINGLE_ENTITY_EVENT_OPTIONS_SCHEMA = {
_OPTIONS_SCHEMA_DICT = {
    vol.Required(CONF_ENTITY_ID): cv.entity_id,
    vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
    vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
}

_SINGLE_ENTITY_EVENT_TRIGGER_SCHEMA = vol.Schema(
_CONFIG_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_OPTIONS): _SINGLE_ENTITY_EVENT_OPTIONS_SCHEMA,
        vol.Required(CONF_OPTIONS): _OPTIONS_SCHEMA_DICT,
    },
)

_EVENT_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_OPTIONS, default={}): {
            vol.Required(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
            vol.Required(CONF_OFFSET_TYPE, default=OFFSET_TYPE_BEFORE): vol.In(
                {OFFSET_TYPE_BEFORE, OFFSET_TYPE_AFTER}
            ),
        },
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)

# mypy: disallow-any-generics

@@ -80,7 +55,6 @@ class QueuedCalendarEvent:

    trigger_time: datetime.datetime
    event: CalendarEvent
    entity_id: str


@dataclass

@@ -120,7 +94,7 @@ class Timespan:
        return f"[{self.start}, {self.end})"


type EventFetcher = Callable[[Timespan], Awaitable[list[tuple[str, CalendarEvent]]]]
type EventFetcher = Callable[[Timespan], Awaitable[list[CalendarEvent]]]
type QueuedEventFetcher = Callable[[Timespan], Awaitable[list[QueuedCalendarEvent]]]

@@ -136,24 +110,15 @@ def get_entity(hass: HomeAssistant, entity_id: str) -> CalendarEntity:
    return entity


def event_fetcher(hass: HomeAssistant, entity_ids: set[str]) -> EventFetcher:
def event_fetcher(hass: HomeAssistant, entity_id: str) -> EventFetcher:
    """Build an async_get_events wrapper to fetch events during a time span."""

    async def async_get_events(timespan: Timespan) -> list[tuple[str, CalendarEvent]]:
    async def async_get_events(timespan: Timespan) -> list[CalendarEvent]:
        """Return events active in the specified time span."""
        entity = get_entity(hass, entity_id)
        # Expand by one second to make the end time exclusive
        end_time = timespan.end + datetime.timedelta(seconds=1)

        events: list[tuple[str, CalendarEvent]] = []
        for entity_id in entity_ids:
            entity = get_entity(hass, entity_id)
            events.extend(
                (entity_id, event)
                for event in await entity.async_get_events(
                    hass, timespan.start, end_time
                )
            )
        return events
        return await entity.async_get_events(hass, timespan.start, end_time)

    return async_get_events

@@ -177,11 +142,12 @@ def queued_event_fetcher(
        # Example: For an EVENT_END trigger the event may start during this
        # time span, but need to be triggered later when the end happens.
        results = []
        for entity_id, event in active_events:
            trigger_time = get_trigger_time(event)
        for trigger_time, event in zip(
            map(get_trigger_time, active_events), active_events, strict=False
        ):
            if trigger_time not in offset_timespan:
                continue
            results.append(QueuedCalendarEvent(trigger_time + offset, event, entity_id))
            results.append(QueuedCalendarEvent(trigger_time + offset, event))

        _LOGGER.debug(
            "Scan events @ %s%s found %s eligible of %s active",

@@ -274,7 +240,6 @@ class CalendarEventListener:
        _LOGGER.debug("Dispatching event: %s", queued_event.event)
        payload = {
            **self._trigger_payload,
            ATTR_ENTITY_ID: queued_event.entity_id,
            "calendar_event": queued_event.event.as_dict(),
        }
        self._action_runner(payload, "calendar event state change")

@@ -295,77 +260,8 @@ class CalendarEventListener:
        self._listen_next_calendar_event()


class TargetCalendarEventListener(TargetEntityChangeTracker):
    """Helper class to listen to calendar events for target entity changes."""

    def __init__(
        self,
        hass: HomeAssistant,
        target_selection: TargetSelection,
        event_type: str,
        offset: datetime.timedelta,
        run_action: TriggerActionRunner,
    ) -> None:
        """Initialize the state change tracker."""

        def entity_filter(entities: set[str]) -> set[str]:
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        super().__init__(hass, target_selection, entity_filter)
        self._event_type = event_type
        self._offset = offset
        self._run_action = run_action
        self._trigger_data = {
            "event": event_type,
            "offset": offset,
        }

        self._pending_listener_task: asyncio.Task[None] | None = None
        self._calendar_event_listener: CalendarEventListener | None = None

    @callback
    def _handle_entities_update(self, tracked_entities: set[str]) -> None:
        """Restart the listeners when the list of entities of the tracked targets is updated."""
        if self._pending_listener_task:
            self._pending_listener_task.cancel()
        self._pending_listener_task = self._hass.async_create_task(
            self._start_listening(tracked_entities)
        )

    async def _start_listening(self, tracked_entities: set[str]) -> None:
        """Start listening for calendar events."""
        _LOGGER.debug("Tracking events for calendars: %s", tracked_entities)
        if self._calendar_event_listener:
            self._calendar_event_listener.async_detach()
        self._calendar_event_listener = CalendarEventListener(
            self._hass,
            self._run_action,
            self._trigger_data,
            queued_event_fetcher(
                event_fetcher(self._hass, tracked_entities),
                self._event_type,
                self._offset,
            ),
        )
        await self._calendar_event_listener.async_attach()

    def _unsubscribe(self) -> None:
        """Unsubscribe from all events."""
        super()._unsubscribe()
        if self._pending_listener_task:
            self._pending_listener_task.cancel()
            self._pending_listener_task = None
        if self._calendar_event_listener:
            self._calendar_event_listener.async_detach()
            self._calendar_event_listener = None


class SingleEntityEventTrigger(Trigger):
    """Legacy single calendar entity event trigger."""
class EventTrigger(Trigger):
    """Calendar event trigger."""

    _options: dict[str, Any]

@@ -375,7 +271,7 @@ class SingleEntityEventTrigger(Trigger):
    ) -> ConfigType:
        """Validate complete config."""
        complete_config = move_top_level_schema_fields_to_options(
            complete_config, _SINGLE_ENTITY_EVENT_OPTIONS_SCHEMA
            complete_config, _OPTIONS_SCHEMA_DICT
        )
        return await super().async_validate_complete_config(hass, complete_config)

@@ -384,7 +280,7 @@ class SingleEntityEventTrigger(Trigger):
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, _SINGLE_ENTITY_EVENT_TRIGGER_SCHEMA(config))
        return cast(ConfigType, _CONFIG_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize trigger."""

@@ -415,72 +311,15 @@ class SingleEntityEventTrigger(Trigger):
            run_action,
            trigger_data,
            queued_event_fetcher(
                event_fetcher(self._hass, {entity_id}), event_type, offset
                event_fetcher(self._hass, entity_id), event_type, offset
            ),
        )
        await listener.async_attach()
        return listener.async_detach


class EventTrigger(Trigger):
    """Calendar event trigger."""

    _options: dict[str, Any]
    _event_type: str

    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, _EVENT_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize trigger."""
        super().__init__(hass, config)

        if TYPE_CHECKING:
            assert config.target is not None
            assert config.options is not None
        self._target = config.target
        self._options = config.options

    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach a trigger."""

        offset = self._options[CONF_OFFSET]
        offset_type = self._options[CONF_OFFSET_TYPE]

        if offset_type == OFFSET_TYPE_BEFORE:
            offset = -offset

        target_selection = TargetSelection(self._target)
        if not target_selection.has_any_target:
            raise HomeAssistantError(f"No target defined in {self._target}")
        listener = TargetCalendarEventListener(
            self._hass, target_selection, self._event_type, offset, run_action
        )
        return listener.async_setup()


class EventStartedTrigger(EventTrigger):
    """Calendar event started trigger."""

    _event_type = EVENT_START


class EventEndedTrigger(EventTrigger):
    """Calendar event ended trigger."""

    _event_type = EVENT_END


TRIGGERS: dict[str, type[Trigger]] = {
    "_": SingleEntityEventTrigger,
    "event_started": EventStartedTrigger,
    "event_ended": EventEndedTrigger,
    "_": EventTrigger,
}
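Both trigger variants apply the configured offset to an event boundary, and an offset_type of "before" simply negates it before the addition, as in async_attach_runner above. A tiny sketch with concrete datetimes (values illustrative):

```python
import datetime

OFFSET_TYPE_BEFORE = "before"

def trigger_time(
    boundary: datetime.datetime,
    offset: datetime.timedelta,
    offset_type: str,
) -> datetime.datetime:
    """Fire time for an event start/end, shifted by the configured offset."""
    if offset_type == OFFSET_TYPE_BEFORE:
        offset = -offset  # "before" means the trigger fires earlier
    return boundary + offset

start = datetime.datetime(2026, 1, 14, 9, 0)
print(trigger_time(start, datetime.timedelta(minutes=10), "before"))
# 2026-01-14 08:50:00 -> fires ten minutes before the event starts
```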
@@ -1,27 +0,0 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: calendar
  fields:
    offset:
      required: true
      default:
        days: 0
        hours: 0
        minutes: 0
        seconds: 0
      selector:
        duration:
          enable_day: true
    offset_type:
      required: true
      default: before
      selector:
        select:
          translation_key: trigger_offset_type
          options:
            - before
            - after

event_started: *trigger_common
event_ended: *trigger_common
@@ -31,7 +31,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
)
STEP_SMS_CODE_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_SMS_CODE): str,
        vol.Required(CONF_SMS_CODE): int,
    }
)

@@ -75,7 +75,7 @@ class FressnapfTrackerConfigFlow(ConfigFlow, domain=DOMAIN):
        return errors, False

    async def _async_verify_sms_code(
        self, sms_code: str
        self, sms_code: int
    ) -> tuple[dict[str, str], str | None]:
        """Verify SMS code and return errors and access_token."""
        errors: dict[str, str] = {}
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["fressnapftracker==0.2.1"]
  "requirements": ["fressnapftracker==0.2.0"]
}
@@ -346,7 +346,6 @@ class SensorGroup(GroupEntity, SensorEntity):
        self._attr_name = name
        if name == DEFAULT_NAME:
            self._attr_name = f"{DEFAULT_NAME} {sensor_type}".capitalize()
        self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entity_ids}
        self._attr_unique_id = unique_id
        self._ignore_non_numeric = ignore_non_numeric
        self.mode = all if ignore_non_numeric is False else any
@@ -20,13 +20,10 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [Platform.BINARY_SENSOR, Platform.CAMERA]
PLATFORMS = [Platform.BINARY_SENSOR]


@dataclass

@@ -107,16 +104,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
    # Start the event stream
    await hass.async_add_executor_job(camera.start_stream)

    # Register the main device before platforms that use via_device
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, device_id)},
        name=device_name,
        manufacturer="Hikvision",
        model=device_type,
    )

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True
@@ -185,27 +185,20 @@ class HikvisionBinarySensor(BinarySensorEntity):
        # Build unique ID
        self._attr_unique_id = f"{self._data.device_id}_{sensor_type}_{channel}"

        # Device info for device registry
        # Build entity name based on device type
        if self._data.device_type == "NVR":
            # NVR channels get their own device linked to the NVR via via_device
            self._attr_device_info = DeviceInfo(
                identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
                via_device=(DOMAIN, self._data.device_id),
                name=f"{self._data.device_name} Channel {channel}",
                manufacturer="Hikvision",
                model="NVR Channel",
            )
            self._attr_name = sensor_type
            self._attr_name = f"{sensor_type} {channel}"
        else:
            # Single camera device
            self._attr_device_info = DeviceInfo(
                identifiers={(DOMAIN, self._data.device_id)},
                name=self._data.device_name,
                manufacturer="Hikvision",
                model=self._data.device_type,
            )
            self._attr_name = sensor_type

        # Device info for device registry
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._data.device_id)},
            name=self._data.device_name,
            manufacturer="Hikvision",
            model=self._data.device_type,
        )

        # Set device class
        self._attr_device_class = DEVICE_CLASS_MAP.get(sensor_type)
@@ -1,93 +0,0 @@
"""Support for Hikvision cameras."""

from __future__ import annotations

from homeassistant.components.camera import Camera, CameraEntityFeature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import HikvisionConfigEntry
from .const import DOMAIN

PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,
    entry: HikvisionConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Hikvision cameras from a config entry."""
    data = entry.runtime_data
    camera = data.camera

    # Get available channels from the library
    channels = await hass.async_add_executor_job(camera.get_channels)

    if channels:
        entities = [HikvisionCamera(entry, channel) for channel in channels]
    else:
        # Fallback to single camera if no channels detected
        entities = [HikvisionCamera(entry, 1)]

    async_add_entities(entities)


class HikvisionCamera(Camera):
    """Representation of a Hikvision camera."""

    _attr_has_entity_name = True
    _attr_name = None
    _attr_supported_features = CameraEntityFeature.STREAM

    def __init__(
        self,
        entry: HikvisionConfigEntry,
        channel: int,
    ) -> None:
        """Initialize the camera."""
        super().__init__()
        self._data = entry.runtime_data
        self._channel = channel
        self._camera = self._data.camera

        # Build unique ID (unique per platform per integration)
        self._attr_unique_id = f"{self._data.device_id}_{channel}"

        # Device info for device registry
        if self._data.device_type == "NVR":
            # NVR channels get their own device linked to the NVR via via_device
            self._attr_device_info = DeviceInfo(
                identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
                via_device=(DOMAIN, self._data.device_id),
                name=f"{self._data.device_name} Channel {channel}",
                manufacturer="Hikvision",
                model="NVR Channel",
            )
        else:
            # Single camera device
            self._attr_device_info = DeviceInfo(
                identifiers={(DOMAIN, self._data.device_id)},
                name=self._data.device_name,
                manufacturer="Hikvision",
                model=self._data.device_type,
            )

    async def async_camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        """Return a still image from the camera."""
        try:
            return await self.hass.async_add_executor_job(
                self._camera.get_snapshot, self._channel
            )
        except Exception as err:
            raise HomeAssistantError(
                f"Error getting image from {self._data.device_name} channel {self._channel}: {err}"
            ) from err

    async def stream_source(self) -> str | None:
        """Return the stream source URL."""
        return self._camera.get_stream_url(self._channel)
@@ -27,7 +27,7 @@ from .const import (
    SUPPORTED_PLATFORMS_UI,
    SUPPORTED_PLATFORMS_YAML,
)
from .expose import create_combined_knx_exposure
from .expose import create_knx_exposure
from .knx_module import KNXModule
from .project import STORAGE_KEY as PROJECT_STORAGE_KEY
from .schema import (

@@ -121,10 +121,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    hass.data[KNX_MODULE_KEY] = knx_module

    if CONF_KNX_EXPOSE in config:
        knx_module.yaml_exposures.extend(
            create_combined_knx_exposure(hass, knx_module.xknx, config[CONF_KNX_EXPOSE])
        )
        for expose_config in config[CONF_KNX_EXPOSE]:
            knx_module.exposures.append(
                create_knx_exposure(hass, knx_module.xknx, expose_config)
            )
    configured_platforms_yaml = {
        platform for platform in SUPPORTED_PLATFORMS_YAML if platform in config
    }

@@ -149,9 +149,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        # if not loaded directly return
        return True

    for exposure in knx_module.yaml_exposures:
        exposure.async_remove()
    for exposure in knx_module.service_exposures.values():
    for exposure in knx_module.exposures:
        exposure.async_remove()

    configured_platforms_yaml = {
@@ -2,22 +2,14 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import TaskGroup
|
||||
from collections.abc import Callable, Iterable
|
||||
from dataclasses import dataclass
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from xknx import XKNX
|
||||
from xknx.devices import DateDevice, DateTimeDevice, ExposeSensor, TimeDevice
|
||||
from xknx.dpt import DPTBase, DPTNumeric, DPTString
|
||||
from xknx.dpt.dpt_1 import DPT1BitEnum, DPTSwitch
|
||||
from xknx.dpt import DPTNumeric, DPTString
|
||||
from xknx.exceptions import ConversionError
|
||||
from xknx.telegram.address import (
|
||||
GroupAddress,
|
||||
InternalGroupAddress,
|
||||
parse_device_group_address,
|
||||
)
|
||||
from xknx.remote_value import RemoteValueSensor
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_ENTITY_ID,
|
||||
@@ -49,159 +41,79 @@ _LOGGER = logging.getLogger(__name__)
|
||||
@callback
|
||||
def create_knx_exposure(
|
||||
hass: HomeAssistant, xknx: XKNX, config: ConfigType
|
||||
) -> KnxExposeEntity | KnxExposeTime:
|
||||
"""Create single exposure."""
|
||||
) -> KNXExposeSensor | KNXExposeTime:
|
||||
"""Create exposures from config."""
|
||||
|
||||
expose_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
|
||||
exposure: KnxExposeEntity | KnxExposeTime
|
||||
|
||||
exposure: KNXExposeSensor | KNXExposeTime
|
||||
if (
|
||||
isinstance(expose_type, str)
|
||||
and expose_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES
|
||||
):
|
||||
exposure = KnxExposeTime(
|
||||
exposure = KNXExposeTime(
|
||||
xknx=xknx,
|
||||
config=config,
|
||||
)
|
||||
else:
|
||||
exposure = KnxExposeEntity(
|
||||
hass=hass,
|
||||
exposure = KNXExposeSensor(
|
||||
hass,
|
||||
xknx=xknx,
|
||||
entity_id=config[CONF_ENTITY_ID],
|
||||
options=(_yaml_config_to_expose_options(config),),
|
||||
config=config,
|
||||
)
|
||||
exposure.async_register()
|
||||
return exposure
|
||||
|
||||
|
||||
@callback
|
||||
def create_combined_knx_exposure(
|
||||
hass: HomeAssistant, xknx: XKNX, configs: list[ConfigType]
|
||||
) -> list[KnxExposeEntity | KnxExposeTime]:
|
||||
"""Create exposures from YAML config combined by entity_id."""
|
||||
exposures: list[KnxExposeEntity | KnxExposeTime] = []
|
||||
entity_exposure_map: dict[str, list[KnxExposeOptions]] = {}
|
||||
|
||||
for config in configs:
|
||||
value_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
|
||||
if value_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES:
|
||||
time_exposure = KnxExposeTime(
|
||||
xknx=xknx,
|
||||
config=config,
|
||||
)
|
||||
time_exposure.async_register()
|
||||
exposures.append(time_exposure)
|
||||
continue
|
||||
|
||||
entity_id = config[CONF_ENTITY_ID]
|
||||
option = _yaml_config_to_expose_options(config)
|
||||
entity_exposure_map.setdefault(entity_id, []).append(option)
|
||||
|
||||
for entity_id, options in entity_exposure_map.items():
|
||||
entity_exposure = KnxExposeEntity(
|
||||
hass=hass,
|
||||
xknx=xknx,
|
||||
entity_id=entity_id,
|
||||
options=options,
|
||||
)
|
||||
entity_exposure.async_register()
|
||||
exposures.append(entity_exposure)
|
||||
return exposures
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class KnxExposeOptions:
|
||||
"""Options for KNX Expose."""
|
||||
|
||||
attribute: str | None
|
||||
group_address: GroupAddress | InternalGroupAddress
|
||||
dpt: type[DPTBase]
|
||||
respond_to_read: bool
|
||||
cooldown: float
|
||||
default: Any | None
|
||||
value_template: Template | None
|
||||
|
||||
|
||||
def _yaml_config_to_expose_options(config: ConfigType) -> KnxExposeOptions:
|
||||
"""Convert single yaml expose config to KnxExposeOptions."""
|
||||
value_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
|
||||
dpt: type[DPTBase]
|
||||
if value_type == "binary":
|
||||
# HA yaml expose flag for DPT-1 (no explicit DPT 1 definitions in xknx back then)
|
||||
dpt = DPTSwitch
|
||||
else:
|
||||
dpt = DPTBase.parse_transcoder(config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]) # type: ignore[assignment] # checked by schema validation
|
||||
ga = parse_device_group_address(config[KNX_ADDRESS])
|
||||
return KnxExposeOptions(
|
||||
attribute=config.get(ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE),
|
||||
group_address=ga,
|
||||
dpt=dpt,
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN],
|
||||
default=config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT),
|
||||
value_template=config.get(CONF_VALUE_TEMPLATE),
|
||||
)
|
||||
|
||||
|
||||
class KnxExposeEntity:
|
||||
"""Expose Home Assistant entity values to KNX bus."""
|
||||
class KNXExposeSensor:
|
||||
"""Object to Expose Home Assistant entity to KNX bus."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
xknx: XKNX,
|
||||
entity_id: str,
|
||||
options: Iterable[KnxExposeOptions],
|
||||
config: ConfigType,
|
||||
) -> None:
|
||||
"""Initialize KnxExposeEntity class."""
|
||||
"""Initialize of Expose class."""
|
||||
self.hass = hass
|
||||
self.xknx = xknx
|
||||
self.entity_id = entity_id
|
||||
|
||||
self.entity_id: str = config[CONF_ENTITY_ID]
|
||||
self.expose_attribute: str | None = config.get(
|
||||
ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE
|
||||
)
|
||||
self.expose_default = config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT)
|
||||
self.expose_type: int | str = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
|
||||
self.value_template: Template | None = config.get(CONF_VALUE_TEMPLATE)
|
||||
|
||||
self._remove_listener: Callable[[], None] | None = None
|
||||
self._exposures = tuple(
|
||||
(
|
||||
option,
|
||||
ExposeSensor(
|
||||
xknx=self.xknx,
|
||||
name=f"{self.entity_id} {option.attribute or 'state'}",
|
||||
group_address=option.group_address,
|
||||
respond_to_read=option.respond_to_read,
|
||||
value_type=option.dpt,
|
||||
cooldown=option.cooldown,
|
||||
),
|
||||
)
|
||||
for option in options
|
||||
self.device: ExposeSensor = ExposeSensor(
|
||||
xknx=self.xknx,
|
||||
name=f"{self.entity_id}__{self.expose_attribute or 'state'}",
|
||||
group_address=config[KNX_ADDRESS],
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
value_type=self.expose_type,
|
||||
cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN],
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return name of the expose entity."""
|
||||
expose_names = [opt.attribute or "state" for opt, _ in self._exposures]
|
||||
return f"{self.entity_id}__{'__'.join(expose_names)}"
|
||||
|
||||
@callback
|
||||
def async_register(self) -> None:
|
||||
"""Register listener and XKNX devices."""
|
||||
"""Register listener."""
|
||||
self._remove_listener = async_track_state_change_event(
|
||||
self.hass, [self.entity_id], self._async_entity_changed
|
||||
)
|
||||
for _option, xknx_expose in self._exposures:
|
||||
self.xknx.devices.async_add(xknx_expose)
|
||||
self.xknx.devices.async_add(self.device)
|
||||
self._init_expose_state()
|
||||
|
||||
@callback
|
||||
def _init_expose_state(self) -> None:
|
||||
"""Initialize state of all exposures."""
|
||||
"""Initialize state of the exposure."""
|
||||
init_state = self.hass.states.get(self.entity_id)
|
||||
for option, xknx_expose in self._exposures:
|
||||
state_value = self._get_expose_value(init_state, option)
|
||||
try:
|
||||
xknx_expose.sensor_value.value = state_value
|
||||
except ConversionError:
|
||||
_LOGGER.exception(
|
||||
"Error setting value %s for expose sensor %s",
|
||||
state_value,
|
||||
xknx_expose.name,
|
||||
)
|
||||
state_value = self._get_expose_value(init_state)
|
||||
try:
|
||||
self.device.sensor_value.value = state_value
|
||||
except ConversionError:
|
||||
_LOGGER.exception("Error during sending of expose sensor value")
|
||||
|
||||
@callback
|
||||
def async_remove(self) -> None:
|
||||
@@ -209,57 +121,53 @@ class KnxExposeEntity:
|
||||
if self._remove_listener is not None:
|
||||
self._remove_listener()
|
||||
self._remove_listener = None
|
||||
for _option, xknx_expose in self._exposures:
|
||||
self.xknx.devices.async_remove(xknx_expose)
|
||||
self.xknx.devices.async_remove(self.device)
|
||||
|
||||
def _get_expose_value(
|
||||
self, state: State | None, option: KnxExposeOptions
|
||||
) -> bool | int | float | str | None:
|
||||
"""Extract value from state for a specific option."""
|
||||
def _get_expose_value(self, state: State | None) -> bool | int | float | str | None:
|
||||
"""Extract value from state."""
|
||||
if state is None or state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
|
||||
if option.default is None:
|
||||
if self.expose_default is None:
|
||||
return None
|
||||
value = option.default
|
||||
elif option.attribute is not None:
|
||||
_attr = state.attributes.get(option.attribute)
|
||||
value = _attr if _attr is not None else option.default
|
||||
value = self.expose_default
|
||||
elif self.expose_attribute is not None:
|
||||
_attr = state.attributes.get(self.expose_attribute)
|
||||
value = _attr if _attr is not None else self.expose_default
|
||||
else:
|
||||
value = state.state
|
||||
|
||||
if option.value_template is not None:
|
||||
if self.value_template is not None:
|
||||
try:
|
||||
value = option.value_template.async_render_with_possible_json_value(
|
||||
value = self.value_template.async_render_with_possible_json_value(
|
||||
value, error_value=None
|
||||
)
|
||||
except (TemplateError, TypeError, ValueError) as err:
|
||||
_LOGGER.warning(
|
||||
"Error rendering value template for KNX expose %s %s %s: %s",
|
||||
self.entity_id,
|
||||
option.attribute or "state",
|
||||
option.value_template.template,
|
||||
"Error rendering value template for KNX expose %s %s: %s",
|
||||
self.device.name,
|
||||
self.value_template.template,
|
||||
err,
|
||||
)
|
||||
return None
|
||||
|
||||
if issubclass(option.dpt, DPT1BitEnum):
|
||||
if self.expose_type == "binary":
|
||||
if value in (1, STATE_ON, "True"):
|
||||
return True
|
||||
if value in (0, STATE_OFF, "False"):
|
||||
return False
|
||||
|
||||
# Handle numeric and string DPT conversions
|
||||
if value is not None:
|
||||
if value is not None and (
|
||||
isinstance(self.device.sensor_value, RemoteValueSensor)
|
||||
):
|
||||
try:
|
||||
if issubclass(option.dpt, DPTNumeric):
|
||||
if issubclass(self.device.sensor_value.dpt_class, DPTNumeric):
|
||||
return float(value)
|
||||
if issubclass(option.dpt, DPTString):
|
||||
if issubclass(self.device.sensor_value.dpt_class, DPTString):
|
||||
# DPT 16.000 only allows up to 14 Bytes
|
||||
return str(value)[:14]
|
||||
except (ValueError, TypeError) as err:
|
||||
_LOGGER.warning(
|
||||
'Could not expose %s %s value "%s" to KNX: Conversion failed: %s',
|
||||
self.entity_id,
|
||||
option.attribute or "state",
|
||||
self.expose_attribute or "state",
|
||||
value,
|
||||
err,
|
||||
)
|
||||
@@ -267,31 +175,32 @@ class KnxExposeEntity:
|
||||
return value # type: ignore[no-any-return]

async def _async_entity_changed(self, event: Event[EventStateChangedData]) -> None:
"""Handle entity change for all options."""
"""Handle entity change."""
new_state = event.data["new_state"]
async with TaskGroup() as tg:
for option, xknx_expose in self._exposures:
expose_value = self._get_expose_value(new_state, option)
if expose_value is None:
continue
tg.create_task(self._async_set_knx_value(xknx_expose, expose_value))
if (new_value := self._get_expose_value(new_state)) is None:
return
old_state = event.data["old_state"]
# don't use default value for comparison on first state change (old_state is None)
old_value = self._get_expose_value(old_state) if old_state is not None else None
# don't send same value sequentially
if new_value != old_value:
await self._async_set_knx_value(new_value)
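The comparison logic is worth spelling out: the value derived from the previous state is recomputed with the same rules, and the write is skipped when nothing changed, so flapping attributes do not spam the bus. A hedged sketch of just that rule (standalone, hypothetical helper):

def should_send(old_value: object, new_value: object) -> bool:
    """Return True when the new value should be written to the bus (sketch)."""
    if new_value is None:
        return False  # nothing usable to send
    return new_value != old_value

assert should_send(None, 21.5)      # first state change: send
assert not should_send(21.5, 21.5)  # unchanged: skip
assert should_send(21.5, 22.0)      # changed: send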
|
||||
|
||||
async def _async_set_knx_value(
|
||||
self, xknx_expose: ExposeSensor, value: StateType
|
||||
) -> None:
|
||||
async def _async_set_knx_value(self, value: StateType) -> None:
|
||||
"""Set new value on xknx ExposeSensor."""
|
||||
try:
|
||||
await xknx_expose.set(value, skip_unchanged=True)
|
||||
await self.device.set(value)
|
||||
except ConversionError as err:
|
||||
_LOGGER.warning(
|
||||
'Could not expose %s value "%s" to KNX: %s',
|
||||
xknx_expose.name,
|
||||
'Could not expose %s %s value "%s" to KNX: %s',
|
||||
self.entity_id,
|
||||
self.expose_attribute or "state",
|
||||
value,
|
||||
err,
|
||||
)
|
||||
|
||||
|
||||
class KnxExposeTime:
|
||||
class KNXExposeTime:
|
||||
"""Object to Expose Time/Date object to KNX bus."""
|
||||
|
||||
def __init__(self, xknx: XKNX, config: ConfigType) -> None:
|
||||
@@ -313,11 +222,6 @@ class KnxExposeTime:
|
||||
group_address=config[KNX_ADDRESS],
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return name of the time expose object."""
|
||||
return f"expose_{self.device.name}"
|
||||
|
||||
@callback
|
||||
def async_register(self) -> None:
|
||||
"""Register listener."""
|
||||
|
||||
@@ -54,7 +54,7 @@ from .const import (
|
||||
TELEGRAM_LOG_DEFAULT,
|
||||
)
|
||||
from .device import KNXInterfaceDevice
|
||||
from .expose import KnxExposeEntity, KnxExposeTime
|
||||
from .expose import KNXExposeSensor, KNXExposeTime
|
||||
from .project import KNXProject
|
||||
from .repairs import data_secure_group_key_issue_dispatcher
|
||||
from .storage.config_store import KNXConfigStore
|
||||
@@ -73,8 +73,8 @@ class KNXModule:
|
||||
self.hass = hass
|
||||
self.config_yaml = config
|
||||
self.connected = False
|
||||
self.yaml_exposures: list[KnxExposeEntity | KnxExposeTime] = []
|
||||
self.service_exposures: dict[str, KnxExposeEntity | KnxExposeTime] = {}
|
||||
self.exposures: list[KNXExposeSensor | KNXExposeTime] = []
|
||||
self.service_exposures: dict[str, KNXExposeSensor | KNXExposeTime] = {}
|
||||
self.entry = entry
|
||||
|
||||
self.project = KNXProject(hass=hass, entry=entry)
|
||||
|
||||
@@ -11,7 +11,7 @@
"loggers": ["xknx", "xknxproject"],
"quality_scale": "platinum",
"requirements": [
"xknx==3.14.0",
"xknx==3.13.0",
"xknxproject==3.8.2",
"knx-frontend==2025.12.30.151231"
],
|
||||
|
||||
@@ -193,7 +193,7 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
|
||||
" for '%s' - %s"
|
||||
),
|
||||
group_address,
|
||||
replaced_exposure.name,
|
||||
replaced_exposure.device.name,
|
||||
)
|
||||
replaced_exposure.async_remove()
|
||||
exposure = create_knx_exposure(knx_module.hass, knx_module.xknx, call.data)
|
||||
@@ -201,7 +201,7 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
|
||||
_LOGGER.debug(
|
||||
"Service exposure_register registered exposure for '%s' - %s",
|
||||
group_address,
|
||||
exposure.name,
|
||||
exposure.device.name,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@
|
||||
},
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"description": "Tests if one or more lights are off.",
|
||||
"description": "Test if a light is off.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::light::common::condition_behavior_description%]",
|
||||
@@ -52,7 +52,7 @@
|
||||
"name": "If a light is off"
|
||||
},
|
||||
"is_on": {
|
||||
"description": "Tests if one or more lights are on.",
|
||||
"description": "Test if a light is on.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::light::common::condition_behavior_description%]",
|
||||
|
||||
@@ -4,64 +4,45 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientConnectionError, ClientResponseError
|
||||
from pymelcloud import get_devices
|
||||
from pymelcloud import Device, get_devices
|
||||
from pymelcloud.atw_device import Zone
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import UpdateFailed
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
|
||||
|
||||
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
|
||||
|
||||
type MelCloudConfigEntry = ConfigEntry[dict[str, list[MelCloudDevice]]]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: MelCloudConfigEntry) -> bool:
|
||||
"""Establish connection with MELCloud."""
|
||||
token = entry.data[CONF_TOKEN]
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
conf = entry.data
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
all_devices = await get_devices(
|
||||
token,
|
||||
session,
|
||||
conf_update_interval=timedelta(minutes=30),
|
||||
device_set_debounce=timedelta(seconds=2),
|
||||
)
|
||||
mel_devices = await mel_devices_setup(hass, conf[CONF_TOKEN])
except ClientResponseError as ex:
if ex.status in (401, 403):
if isinstance(ex, ClientResponseError) and ex.code == 401:
raise ConfigEntryAuthFailed from ex
if ex.status == 429:
raise UpdateFailed(
"MELCloud rate limit exceeded. Your account may be temporarily blocked"
) from ex
raise UpdateFailed(f"Error communicating with MELCloud: {ex}") from ex
raise ConfigEntryNotReady from ex
except (TimeoutError, ClientConnectionError) as ex:
raise UpdateFailed(f"Error communicating with MELCloud: {ex}") from ex
raise ConfigEntryNotReady from ex

# Create per-device coordinators
coordinators: dict[str, list[MelCloudDeviceUpdateCoordinator]] = {}
device_registry = dr.async_get(hass)
for device_type, devices in all_devices.items():
coordinators[device_type] = []
for device in devices:
coordinator = MelCloudDeviceUpdateCoordinator(hass, device, entry)
# Perform initial refresh for this device
await coordinator.async_config_entry_first_refresh()
coordinators[device_type].append(coordinator)
# Register parent device now so zone entities can reference it via via_device
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
**coordinator.device_info,
)
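The registration order matters: the parent must exist in the device registry before any zone entity declares it through via_device. A minimal sketch of that pairing with made-up identifiers (illustrative only):

from homeassistant.helpers.device_registry import DeviceInfo

DOMAIN = "melcloud"  # assumed here for illustration

parent = DeviceInfo(
    identifiers={(DOMAIN, "mac-serial")},
    name="Heat pump",
)
zone = DeviceInfo(
    identifiers={(DOMAIN, "mac-serial-zone1")},
    name="Heat pump Zone 1",
    via_device=(DOMAIN, "mac-serial"),  # must match the parent's identifiers entry
)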
|
||||
|
||||
entry.runtime_data = coordinators
|
||||
entry.runtime_data = mel_devices
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
@@ -69,3 +50,90 @@ async def async_setup_entry(hass: HomeAssistant, entry: MelCloudConfigEntry) ->
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
|
||||
|
||||
|
||||
class MelCloudDevice:
|
||||
"""MELCloud Device instance."""
|
||||
|
||||
def __init__(self, device: Device) -> None:
|
||||
"""Construct a device wrapper."""
|
||||
self.device = device
|
||||
self.name = device.name
|
||||
self._available = True
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
async def async_update(self, **kwargs):
|
||||
"""Pull the latest data from MELCloud."""
|
||||
try:
|
||||
await self.device.update()
|
||||
self._available = True
|
||||
except ClientConnectionError:
|
||||
_LOGGER.warning("Connection failed for %s", self.name)
|
||||
self._available = False
|
||||
|
||||
async def async_set(self, properties: dict[str, Any]):
|
||||
"""Write state changes to the MELCloud API."""
|
||||
try:
|
||||
await self.device.set(properties)
|
||||
self._available = True
|
||||
except ClientConnectionError:
|
||||
_LOGGER.warning("Connection failed for %s", self.name)
|
||||
self._available = False
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return self._available
|
||||
|
||||
@property
|
||||
def device_id(self):
|
||||
"""Return device ID."""
|
||||
return self.device.device_id
|
||||
|
||||
@property
|
||||
def building_id(self):
|
||||
"""Return building ID of the device."""
|
||||
return self.device.building_id
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return a device description for device registry."""
|
||||
model = None
|
||||
if (unit_infos := self.device.units) is not None:
|
||||
model = ", ".join([x["model"] for x in unit_infos if x["model"]])
|
||||
return DeviceInfo(
|
||||
connections={(CONNECTION_NETWORK_MAC, self.device.mac)},
|
||||
identifiers={(DOMAIN, f"{self.device.mac}-{self.device.serial}")},
|
||||
manufacturer="Mitsubishi Electric",
|
||||
model=model,
|
||||
name=self.name,
|
||||
)
|
||||
|
||||
def zone_device_info(self, zone: Zone) -> DeviceInfo:
|
||||
"""Return a zone device description for device registry."""
|
||||
dev = self.device
|
||||
return DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{dev.mac}-{dev.serial}-{zone.zone_index}")},
|
||||
manufacturer="Mitsubishi Electric",
|
||||
model="ATW zone device",
|
||||
name=f"{self.name} {zone.name}",
|
||||
via_device=(DOMAIN, f"{dev.mac}-{dev.serial}"),
|
||||
)
|
||||
|
||||
|
||||
async def mel_devices_setup(
|
||||
hass: HomeAssistant, token: str
|
||||
) -> dict[str, list[MelCloudDevice]]:
|
||||
"""Query connected devices from MELCloud."""
|
||||
session = async_get_clientsession(hass)
|
||||
async with asyncio.timeout(10):
|
||||
all_devices = await get_devices(
|
||||
token,
|
||||
session,
|
||||
conf_update_interval=timedelta(minutes=30),
|
||||
device_set_debounce=timedelta(seconds=2),
|
||||
)
|
||||
wrapped_devices: dict[str, list[MelCloudDevice]] = {}
|
||||
for device_type, devices in all_devices.items():
|
||||
wrapped_devices[device_type] = [MelCloudDevice(device) for device in devices]
|
||||
return wrapped_devices
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Any, cast
|
||||
|
||||
from pymelcloud import DEVICE_TYPE_ATA, DEVICE_TYPE_ATW, AtaDevice, AtwDevice
|
||||
@@ -28,6 +29,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import MelCloudConfigEntry, MelCloudDevice
|
||||
from .const import (
|
||||
ATTR_STATUS,
|
||||
ATTR_VANE_HORIZONTAL,
|
||||
@@ -38,8 +40,9 @@ from .const import (
|
||||
SERVICE_SET_VANE_HORIZONTAL,
|
||||
SERVICE_SET_VANE_VERTICAL,
|
||||
)
|
||||
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
|
||||
from .entity import MelCloudEntity
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=60)
|
||||
|
||||
|
||||
ATA_HVAC_MODE_LOOKUP = {
|
||||
ata.OPERATION_MODE_HEAT: HVACMode.HEAT,
|
||||
@@ -71,24 +74,27 @@ ATW_ZONE_HVAC_ACTION_LOOKUP = {
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
_hass: HomeAssistant,
|
||||
hass: HomeAssistant,
|
||||
entry: MelCloudConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up MelCloud device climate based on config_entry."""
|
||||
coordinators = entry.runtime_data
|
||||
mel_devices = entry.runtime_data
|
||||
entities: list[AtaDeviceClimate | AtwDeviceZoneClimate] = [
|
||||
AtaDeviceClimate(coordinator, coordinator.device)
|
||||
for coordinator in coordinators.get(DEVICE_TYPE_ATA, [])
|
||||
AtaDeviceClimate(mel_device, mel_device.device)
|
||||
for mel_device in mel_devices[DEVICE_TYPE_ATA]
|
||||
]
|
||||
entities.extend(
|
||||
[
|
||||
AtwDeviceZoneClimate(coordinator, coordinator.device, zone)
|
||||
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
|
||||
for zone in coordinator.device.zones
|
||||
AtwDeviceZoneClimate(mel_device, mel_device.device, zone)
|
||||
for mel_device in mel_devices[DEVICE_TYPE_ATW]
|
||||
for zone in mel_device.device.zones
|
||||
]
|
||||
)
|
||||
async_add_entities(entities)
|
||||
async_add_entities(
|
||||
entities,
|
||||
True,
|
||||
)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
@@ -103,19 +109,21 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
class MelCloudClimate(MelCloudEntity, ClimateEntity):
|
||||
class MelCloudClimate(ClimateEntity):
|
||||
"""Base climate device."""
|
||||
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MelCloudDeviceUpdateCoordinator,
|
||||
) -> None:
|
||||
def __init__(self, device: MelCloudDevice) -> None:
|
||||
"""Initialize the climate."""
|
||||
super().__init__(coordinator)
|
||||
self._base_device = self.coordinator.device
|
||||
self.api = device
|
||||
self._base_device = self.api.device
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update state from MELCloud."""
|
||||
await self.api.async_update()
|
||||
|
||||
@property
|
||||
def target_temperature_step(self) -> float | None:
|
||||
@@ -134,29 +142,26 @@ class AtaDeviceClimate(MelCloudClimate):
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MelCloudDeviceUpdateCoordinator,
|
||||
ata_device: AtaDevice,
|
||||
) -> None:
|
||||
def __init__(self, device: MelCloudDevice, ata_device: AtaDevice) -> None:
|
||||
"""Initialize the climate."""
|
||||
super().__init__(coordinator)
|
||||
super().__init__(device)
|
||||
self._device = ata_device
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{self.coordinator.device.serial}-{self.coordinator.device.mac}"
|
||||
)
|
||||
self._attr_device_info = self.coordinator.device_info
|
||||
self._attr_unique_id = f"{self.api.device.serial}-{self.api.device.mac}"
|
||||
self._attr_device_info = self.api.device_info
|
||||
|
||||
# Add horizontal swing if device supports it
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
# We can only check for vane_horizontal once we fetch the device data from the cloud
|
||||
if self._device.vane_horizontal:
|
||||
self._attr_supported_features |= ClimateEntityFeature.SWING_HORIZONTAL_MODE
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return the optional state attributes with device specific additions."""
|
||||
attr: dict[str, Any] = {}
|
||||
attr.update(self.coordinator.extra_attributes)
|
||||
attr = {}
|
||||
|
||||
if vane_horizontal := self._device.vane_horizontal:
|
||||
attr.update(
|
||||
@@ -203,7 +208,7 @@ class AtaDeviceClimate(MelCloudClimate):
|
||||
"""Set new target hvac mode."""
|
||||
set_dict: dict[str, Any] = {}
|
||||
self._apply_set_hvac_mode(hvac_mode, set_dict)
|
||||
await self.coordinator.async_set(set_dict)
|
||||
await self._device.set(set_dict)
|
||||
|
||||
@property
|
||||
def hvac_modes(self) -> list[HVACMode]:
|
||||
@@ -236,7 +241,7 @@ class AtaDeviceClimate(MelCloudClimate):
|
||||
set_dict["target_temperature"] = kwargs.get(ATTR_TEMPERATURE)
|
||||
|
||||
if set_dict:
|
||||
await self.coordinator.async_set(set_dict)
|
||||
await self._device.set(set_dict)
|
||||
|
||||
@property
|
||||
def fan_mode(self) -> str | None:
|
||||
@@ -245,7 +250,7 @@ class AtaDeviceClimate(MelCloudClimate):
|
||||
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set new target fan mode."""
|
||||
await self.coordinator.async_set({"fan_speed": fan_mode})
|
||||
await self._device.set({"fan_speed": fan_mode})
|
||||
|
||||
@property
|
||||
def fan_modes(self) -> list[str] | None:
|
||||
@@ -259,7 +264,7 @@ class AtaDeviceClimate(MelCloudClimate):
|
||||
f"Invalid horizontal vane position {position}. Valid positions:"
|
||||
f" [{self._device.vane_horizontal_positions}]."
|
||||
)
|
||||
await self.coordinator.async_set({ata.PROPERTY_VANE_HORIZONTAL: position})
|
||||
await self._device.set({ata.PROPERTY_VANE_HORIZONTAL: position})
|
||||
|
||||
async def async_set_vane_vertical(self, position: str) -> None:
|
||||
"""Set vertical vane position."""
|
||||
@@ -268,7 +273,7 @@ class AtaDeviceClimate(MelCloudClimate):
|
||||
f"Invalid vertical vane position {position}. Valid positions:"
|
||||
f" [{self._device.vane_vertical_positions}]."
|
||||
)
|
||||
await self.coordinator.async_set({ata.PROPERTY_VANE_VERTICAL: position})
|
||||
await self._device.set({ata.PROPERTY_VANE_VERTICAL: position})
|
||||
|
||||
@property
|
||||
def swing_mode(self) -> str | None:
|
||||
@@ -300,11 +305,11 @@ class AtaDeviceClimate(MelCloudClimate):
|
||||
|
||||
async def async_turn_on(self) -> None:
|
||||
"""Turn the entity on."""
|
||||
await self.coordinator.async_set({"power": True})
|
||||
await self._device.set({"power": True})
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Turn the entity off."""
|
||||
await self.coordinator.async_set({"power": False})
|
||||
await self._device.set({"power": False})
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
@@ -333,18 +338,15 @@ class AtwDeviceZoneClimate(MelCloudClimate):
|
||||
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MelCloudDeviceUpdateCoordinator,
|
||||
atw_device: AtwDevice,
|
||||
atw_zone: Zone,
|
||||
self, device: MelCloudDevice, atw_device: AtwDevice, atw_zone: Zone
|
||||
) -> None:
|
||||
"""Initialize the climate."""
|
||||
super().__init__(coordinator)
|
||||
super().__init__(device)
|
||||
self._device = atw_device
|
||||
self._zone = atw_zone
|
||||
|
||||
self._attr_unique_id = f"{self.coordinator.device.serial}-{atw_zone.zone_index}"
|
||||
self._attr_device_info = self.coordinator.zone_device_info(atw_zone)
|
||||
self._attr_unique_id = f"{self.api.device.serial}-{atw_zone.zone_index}"
|
||||
self._attr_device_info = self.api.zone_device_info(atw_zone)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
@@ -358,16 +360,15 @@ class AtwDeviceZoneClimate(MelCloudClimate):
|
||||
@property
def hvac_mode(self) -> HVACMode:
"""Return hvac operation ie. heat, cool mode."""
# Use zone status (heat/cool/idle) not operation_mode (heat-thermostat/etc.)
status = self._zone.status
if not self._device.power or status is None:
mode = self._zone.operation_mode
if not self._device.power or mode is None:
return HVACMode.OFF
return ATW_ZONE_HVAC_MODE_LOOKUP.get(status, HVACMode.OFF)
return ATW_ZONE_HVAC_MODE_LOOKUP.get(mode, HVACMode.OFF)
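Both variants gate the lookup on device power before consulting the zone. A hedged sketch of the pattern with assumed status keys (the key strings here are not taken from pymelcloud):

from homeassistant.components.climate import HVACMode

ZONE_STATUS_TO_HVAC = {"heat": HVACMode.HEAT, "cool": HVACMode.COOL}  # assumed keys

def zone_hvac_mode(power: bool, status: str | None) -> HVACMode:
    """Derive an HVAC mode from power and zone status (sketch)."""
    if not power or status is None:
        return HVACMode.OFF
    return ZONE_STATUS_TO_HVAC.get(status, HVACMode.OFF)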
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set new target hvac mode."""
|
||||
if hvac_mode == HVACMode.OFF:
|
||||
await self.coordinator.async_set({"power": False})
|
||||
await self._device.set({"power": False})
|
||||
return
|
||||
|
||||
operation_mode = ATW_ZONE_HVAC_MODE_REVERSE_LOOKUP.get(hvac_mode)
|
||||
@@ -380,7 +381,7 @@ class AtwDeviceZoneClimate(MelCloudClimate):
|
||||
props = {PROPERTY_ZONE_2_OPERATION_MODE: operation_mode}
|
||||
if self.hvac_mode == HVACMode.OFF:
|
||||
props["power"] = True
|
||||
await self.coordinator.async_set(props)
|
||||
await self._device.set(props)
|
||||
|
||||
@property
|
||||
def hvac_modes(self) -> list[HVACMode]:
|
||||
@@ -409,4 +410,3 @@ class AtwDeviceZoneClimate(MelCloudClimate):
|
||||
await self._zone.set_target_temperature(
|
||||
kwargs.get(ATTR_TEMPERATURE, self.target_temperature)
|
||||
)
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
@@ -60,10 +60,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except (TimeoutError, ClientError):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except AttributeError:
# python-melcloud library bug: login() raises AttributeError on invalid
# credentials when API response doesn't contain expected "LoginData" key
return self.async_abort(reason="invalid_auth")
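The guard exists because the library signals bad credentials indirectly. A sketch of the same defensive wrapper around a hypothetical client (the client API here is assumed, not python-melcloud's):

async def safe_login(client) -> str | None:
    """Return a token, or None when credentials are rejected (sketch)."""
    try:
        return await client.login()  # hypothetical coroutine
    except AttributeError:
        # the library trips over a missing "LoginData" key on bad credentials
        return None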
|
||||
|
||||
return await self._create_entry(username, acquired_token)
|
||||
|
||||
|
||||
@@ -1,193 +0,0 @@
|
||||
"""DataUpdateCoordinator for the MELCloud integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientConnectionError, ClientResponseError
|
||||
from pymelcloud import Device
|
||||
from pymelcloud.atw_device import Zone
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Delay before refreshing after a state change to allow device to process
# and avoid race conditions with rapid sequential changes
REQUEST_REFRESH_DELAY = 1.5

# Default update interval in minutes (matches upstream Throttle value)
DEFAULT_UPDATE_INTERVAL = 15

# Retry interval in seconds for transient failures
RETRY_INTERVAL_SECONDS = 30

# Number of consecutive failures before marking device unavailable
MAX_CONSECUTIVE_FAILURES = 3
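Taken together, these constants define the failure schedule: the first two failures keep cached data and retry after 30 seconds; the third marks the device unavailable and falls back to the 15-minute interval. A small sketch of that transition (not coordinator code; it reuses the constants above):

def after_failure(failures: int) -> tuple[bool, int]:
    """Return (still_available, next_interval_seconds) after one more failure (sketch)."""
    failures += 1
    if failures < MAX_CONSECUTIVE_FAILURES:
        return True, RETRY_INTERVAL_SECONDS
    return False, DEFAULT_UPDATE_INTERVAL * 60

# after_failure(0) -> (True, 30); after_failure(1) -> (True, 30); after_failure(2) -> (False, 900)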
|
||||
|
||||
|
||||
class MelCloudDeviceUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Per-device coordinator for MELCloud data updates."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
device: Device,
|
||||
config_entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the per-device coordinator."""
|
||||
self.device = device
|
||||
self.device_available = True
|
||||
self._consecutive_failures = 0
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=f"{DOMAIN}_{device.name}",
|
||||
update_interval=timedelta(minutes=DEFAULT_UPDATE_INTERVAL),
|
||||
always_update=True,
|
||||
request_refresh_debouncer=Debouncer(
|
||||
hass,
|
||||
_LOGGER,
|
||||
cooldown=REQUEST_REFRESH_DELAY,
|
||||
immediate=False,
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_attributes(self) -> dict[str, Any]:
|
||||
"""Return extra device attributes."""
|
||||
data: dict[str, Any] = {
|
||||
"device_id": self.device.device_id,
|
||||
"serial": self.device.serial,
|
||||
"mac": self.device.mac,
|
||||
}
|
||||
if (unit_infos := self.device.units) is not None:
|
||||
for i, unit in enumerate(unit_infos[:2]):
|
||||
data[f"unit_{i}_model"] = unit.get("model")
|
||||
data[f"unit_{i}_serial"] = unit.get("serial")
|
||||
return data
|
||||
|
||||
@property
|
||||
def device_id(self) -> str:
|
||||
"""Return device ID."""
|
||||
return self.device.device_id
|
||||
|
||||
@property
|
||||
def building_id(self) -> str:
|
||||
"""Return building ID of the device."""
|
||||
return self.device.building_id
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return a device description for device registry."""
|
||||
model = None
|
||||
if (unit_infos := self.device.units) is not None:
|
||||
model = ", ".join([x["model"] for x in unit_infos if x["model"]])
|
||||
return DeviceInfo(
|
||||
connections={(CONNECTION_NETWORK_MAC, self.device.mac)},
|
||||
identifiers={(DOMAIN, f"{self.device.mac}-{self.device.serial}")},
|
||||
manufacturer="Mitsubishi Electric",
|
||||
model=model,
|
||||
name=self.device.name,
|
||||
)
|
||||
|
||||
def zone_device_info(self, zone: Zone) -> DeviceInfo:
|
||||
"""Return a zone device description for device registry."""
|
||||
dev = self.device
|
||||
return DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{dev.mac}-{dev.serial}-{zone.zone_index}")},
|
||||
manufacturer="Mitsubishi Electric",
|
||||
model="ATW zone device",
|
||||
name=f"{self.device.name} {zone.name}",
|
||||
via_device=(DOMAIN, f"{dev.mac}-{dev.serial}"),
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Fetch data for this specific device from MELCloud."""
|
||||
try:
|
||||
await self.device.update()
|
||||
# Success - reset failure counter and restore normal interval
|
||||
if self._consecutive_failures > 0:
|
||||
_LOGGER.info(
|
||||
"Connection restored for %s after %d failed attempt(s)",
|
||||
self.device.name,
|
||||
self._consecutive_failures,
|
||||
)
|
||||
self._consecutive_failures = 0
|
||||
self.update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
|
||||
self.device_available = True
|
||||
except ClientResponseError as ex:
|
||||
if ex.status in (401, 403):
|
||||
raise ConfigEntryAuthFailed from ex
|
||||
if ex.status == 429:
|
||||
_LOGGER.error(
|
||||
"MELCloud rate limit exceeded for %s. Your account may be "
|
||||
"temporarily blocked",
|
||||
self.device.name,
|
||||
)
|
||||
# Rate limit - mark unavailable immediately
|
||||
self.device_available = False
|
||||
raise UpdateFailed(
|
||||
f"Rate limit exceeded for {self.device.name}"
|
||||
) from ex
|
||||
# Other HTTP errors - use retry logic
|
||||
self._handle_failure(f"Error updating {self.device.name}: {ex}", ex)
|
||||
except ClientConnectionError as ex:
|
||||
self._handle_failure(f"Connection failed for {self.device.name}: {ex}", ex)
|
||||
|
||||
def _handle_failure(self, message: str, exception: Exception | None = None) -> None:
"""Handle a connection failure with retry logic.

For transient failures, entities remain available with their last known
values for up to MAX_CONSECUTIVE_FAILURES attempts. During retries, the
update interval is shortened to RETRY_INTERVAL_SECONDS for faster recovery.
After the threshold is reached, entities are marked unavailable.
"""
|
||||
self._consecutive_failures += 1
|
||||
|
||||
if self._consecutive_failures < MAX_CONSECUTIVE_FAILURES:
|
||||
# Keep entities available with cached data, use shorter retry interval
|
||||
_LOGGER.warning(
|
||||
"%s (attempt %d/%d, retrying in %ds)",
|
||||
message,
|
||||
self._consecutive_failures,
|
||||
MAX_CONSECUTIVE_FAILURES,
|
||||
RETRY_INTERVAL_SECONDS,
|
||||
)
|
||||
self.update_interval = timedelta(seconds=RETRY_INTERVAL_SECONDS)
|
||||
else:
|
||||
# Threshold reached - mark unavailable and restore normal interval
|
||||
_LOGGER.warning(
|
||||
"%s (attempt %d/%d, marking unavailable)",
|
||||
message,
|
||||
self._consecutive_failures,
|
||||
MAX_CONSECUTIVE_FAILURES,
|
||||
)
|
||||
self.device_available = False
|
||||
self.update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
|
||||
raise UpdateFailed(message) from exception
|
||||
|
||||
async def async_set(self, properties: dict[str, Any]) -> None:
|
||||
"""Write state changes to the MELCloud API."""
|
||||
try:
|
||||
await self.device.set(properties)
|
||||
self.device_available = True
|
||||
except ClientConnectionError:
|
||||
_LOGGER.warning("Connection failed for %s", self.device.name)
|
||||
self.device_available = False
|
||||
|
||||
await self.async_request_refresh()
|
||||
|
||||
|
||||
type MelCloudConfigEntry = ConfigEntry[dict[str, list[MelCloudDeviceUpdateCoordinator]]]
|
||||
@@ -9,7 +9,7 @@ from homeassistant.const import CONF_TOKEN, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from .coordinator import MelCloudConfigEntry
|
||||
from . import MelCloudConfigEntry
|
||||
|
||||
TO_REDACT = {
|
||||
CONF_USERNAME,
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
"""Base entity for MELCloud integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import MelCloudDeviceUpdateCoordinator
|
||||
|
||||
|
||||
class MelCloudEntity(CoordinatorEntity[MelCloudDeviceUpdateCoordinator]):
"""Base class for MELCloud entities."""

_attr_has_entity_name = True

@property
def available(self) -> bool:
"""Return True if entity is available."""
return super().available and self.coordinator.device_available
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/melcloud",
"integration_type": "device",
"iot_class": "cloud_polling",
"loggers": ["melcloud"],
"loggers": ["pymelcloud"],
"requirements": ["python-melcloud==0.1.2"]
}
|
||||
|
||||
@@ -19,8 +19,7 @@ from homeassistant.const import UnitOfEnergy, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
|
||||
from .entity import MelCloudEntity
|
||||
from . import MelCloudConfigEntry, MelCloudDevice
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
@@ -112,67 +111,70 @@ ATW_ZONE_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
_hass: HomeAssistant,
|
||||
hass: HomeAssistant,
|
||||
entry: MelCloudConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up MELCloud device sensors based on config_entry."""
|
||||
coordinators = entry.runtime_data
|
||||
mel_devices = entry.runtime_data
|
||||
|
||||
entities: list[MelDeviceSensor] = [
|
||||
MelDeviceSensor(coordinator, description)
|
||||
MelDeviceSensor(mel_device, description)
|
||||
for description in ATA_SENSORS
|
||||
for coordinator in coordinators.get(DEVICE_TYPE_ATA, [])
|
||||
if description.enabled(coordinator)
|
||||
for mel_device in mel_devices[DEVICE_TYPE_ATA]
|
||||
if description.enabled(mel_device)
|
||||
] + [
|
||||
MelDeviceSensor(coordinator, description)
|
||||
MelDeviceSensor(mel_device, description)
|
||||
for description in ATW_SENSORS
|
||||
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
|
||||
if description.enabled(coordinator)
|
||||
for mel_device in mel_devices[DEVICE_TYPE_ATW]
|
||||
if description.enabled(mel_device)
|
||||
]
|
||||
entities.extend(
|
||||
[
|
||||
AtwZoneSensor(coordinator, zone, description)
|
||||
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
|
||||
for zone in coordinator.device.zones
|
||||
AtwZoneSensor(mel_device, zone, description)
|
||||
for mel_device in mel_devices[DEVICE_TYPE_ATW]
|
||||
for zone in mel_device.device.zones
|
||||
for description in ATW_ZONE_SENSORS
|
||||
if description.enabled(zone)
|
||||
]
|
||||
)
|
||||
async_add_entities(entities)
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
class MelDeviceSensor(MelCloudEntity, SensorEntity):
|
||||
class MelDeviceSensor(SensorEntity):
|
||||
"""Representation of a Sensor."""
|
||||
|
||||
entity_description: MelcloudSensorEntityDescription
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MelCloudDeviceUpdateCoordinator,
|
||||
api: MelCloudDevice,
|
||||
description: MelcloudSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator)
|
||||
self._api = api
|
||||
self.entity_description = description
|
||||
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator.device.serial}-{coordinator.device.mac}-{description.key}"
|
||||
)
|
||||
self._attr_device_info = coordinator.device_info
|
||||
self._attr_unique_id = f"{api.device.serial}-{api.device.mac}-{description.key}"
|
||||
self._attr_device_info = api.device_info
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self.coordinator)
|
||||
return self.entity_description.value_fn(self._api)
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Retrieve latest state."""
|
||||
await self._api.async_update()
|
||||
|
||||
|
||||
class AtwZoneSensor(MelDeviceSensor):
|
||||
"""Air-to-Water zone sensor."""
|
||||
"""Air-to-Air device sensor."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MelCloudDeviceUpdateCoordinator,
|
||||
api: MelCloudDevice,
|
||||
zone: Zone,
|
||||
description: MelcloudSensorEntityDescription,
|
||||
) -> None:
|
||||
@@ -182,9 +184,9 @@ class AtwZoneSensor(MelDeviceSensor):
|
||||
description,
|
||||
key=f"{description.key}-zone-{zone.zone_index}",
|
||||
)
|
||||
super().__init__(coordinator, description)
|
||||
super().__init__(api, description)
|
||||
|
||||
self._attr_device_info = coordinator.zone_device_info(zone)
|
||||
self._attr_device_info = api.zone_device_info(zone)
|
||||
self._zone = zone
|
||||
|
||||
@property
|
||||
|
||||
@@ -43,9 +43,6 @@
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"energy_consumed": {
|
||||
"name": "Energy consumed"
|
||||
},
|
||||
"flow_temperature": {
|
||||
"name": "Flow temperature"
|
||||
},
|
||||
|
||||
@@ -21,27 +21,27 @@ from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import MelCloudConfigEntry, MelCloudDevice
|
||||
from .const import ATTR_STATUS
|
||||
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
|
||||
from .entity import MelCloudEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
_hass: HomeAssistant,
|
||||
hass: HomeAssistant,
|
||||
entry: MelCloudConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up MelCloud device climate based on config_entry."""
|
||||
coordinators = entry.runtime_data
|
||||
mel_devices = entry.runtime_data
|
||||
async_add_entities(
|
||||
[
|
||||
AtwWaterHeater(coordinator, coordinator.device)
|
||||
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
|
||||
]
|
||||
AtwWaterHeater(mel_device, mel_device.device)
|
||||
for mel_device in mel_devices[DEVICE_TYPE_ATW]
|
||||
],
|
||||
True,
|
||||
)
|
||||
|
||||
|
||||
class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):
|
||||
class AtwWaterHeater(WaterHeaterEntity):
|
||||
"""Air-to-Water water heater."""
|
||||
|
||||
_attr_supported_features = (
|
||||
@@ -49,26 +49,27 @@ class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):
|
||||
| WaterHeaterEntityFeature.ON_OFF
|
||||
| WaterHeaterEntityFeature.OPERATION_MODE
|
||||
)
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MelCloudDeviceUpdateCoordinator,
|
||||
device: AtwDevice,
|
||||
) -> None:
|
||||
def __init__(self, api: MelCloudDevice, device: AtwDevice) -> None:
|
||||
"""Initialize water heater device."""
|
||||
super().__init__(coordinator)
|
||||
self._api = api
|
||||
self._device = device
|
||||
self._attr_unique_id = coordinator.device.serial
|
||||
self._attr_device_info = coordinator.device_info
|
||||
self._attr_unique_id = api.device.serial
|
||||
self._attr_device_info = api.device_info
|
||||
|
||||
async def async_turn_on(self, **_kwargs: Any) -> None:
|
||||
async def async_update(self) -> None:
|
||||
"""Update state from MELCloud."""
|
||||
await self._api.async_update()
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
await self.coordinator.async_set({PROPERTY_POWER: True})
|
||||
await self._device.set({PROPERTY_POWER: True})
|
||||
|
||||
async def async_turn_off(self, **_kwargs: Any) -> None:
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity off."""
|
||||
await self.coordinator.async_set({PROPERTY_POWER: False})
|
||||
await self._device.set({PROPERTY_POWER: False})
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
@@ -102,7 +103,7 @@ class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
await self.coordinator.async_set(
|
||||
await self._device.set(
|
||||
{
|
||||
PROPERTY_TARGET_TANK_TEMPERATURE: kwargs.get(
|
||||
"temperature", self.target_temperature
|
||||
@@ -112,7 +113,7 @@ class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):
|
||||
|
||||
async def async_set_operation_mode(self, operation_mode: str) -> None:
|
||||
"""Set new target operation mode."""
|
||||
await self.coordinator.async_set({PROPERTY_OPERATION_MODE: operation_mode})
|
||||
await self._device.set({PROPERTY_OPERATION_MODE: operation_mode})
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from mycroftapi import MycroftAPI
|
||||
|
||||
@@ -11,8 +10,6 @@ from homeassistant.components.notify import BaseNotificationService
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -22,17 +19,17 @@ def get_service(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> MycroftNotificationService:
|
||||
"""Get the Mycroft notification service."""
|
||||
return MycroftNotificationService(hass.data[DOMAIN])
|
||||
return MycroftNotificationService(hass.data["mycroft"])
|
||||
|
||||
|
||||
class MycroftNotificationService(BaseNotificationService):
"""The Mycroft Notification Service."""

def __init__(self, mycroft_ip: str) -> None:
def __init__(self, mycroft_ip):
"""Initialize the service."""
self.mycroft_ip = mycroft_ip

def send_message(self, message: str = "", **kwargs: Any) -> None:
def send_message(self, message="", **kwargs):
"""Send a message for Mycroft to speak on the instance."""

text = message
@@ -40,4 +37,4 @@ class MycroftNotificationService(BaseNotificationService):
if mycroft is not None:
mycroft.speak_text(text)
else:
_LOGGER.warning("Could not reach this instance of mycroft")
_LOGGER.error("Could not reach this instance of mycroft")
|
||||
|
||||
@@ -3,25 +3,23 @@
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from aiohttp import ClientError, ClientSession
|
||||
import defusedxml.ElementTree as ET
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_PASSWORD
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, UPDATE_URL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = "namecheapdns"
|
||||
|
||||
INTERVAL = timedelta(minutes=5)
|
||||
|
||||
UPDATE_URL = "https://dynamicdns.park-your-domain.com/update"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -36,74 +34,39 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
type NamecheapConfigEntry = ConfigEntry[None]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Initialize the namecheap DNS component."""
|
||||
|
||||
if DOMAIN in config:
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: NamecheapConfigEntry) -> bool:
|
||||
"""Set up Namecheap DynamicDNS from a config entry."""
|
||||
host = entry.data[CONF_HOST]
|
||||
domain = entry.data[CONF_DOMAIN]
|
||||
password = entry.data[CONF_PASSWORD]
|
||||
host = config[DOMAIN][CONF_HOST]
|
||||
domain = config[DOMAIN][CONF_DOMAIN]
|
||||
password = config[DOMAIN][CONF_PASSWORD]
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
try:
|
||||
if not await update_namecheapdns(session, host, domain, password):
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_failed",
|
||||
translation_placeholders={
|
||||
CONF_DOMAIN: f"{entry.data[CONF_HOST]}.{entry.data[CONF_DOMAIN]}"
|
||||
},
|
||||
)
|
||||
except ClientError as e:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_error",
|
||||
translation_placeholders={
|
||||
CONF_DOMAIN: f"{entry.data[CONF_HOST]}.{entry.data[CONF_DOMAIN]}"
|
||||
},
|
||||
) from e
|
||||
result = await _update_namecheapdns(session, host, domain, password)
|
||||
|
||||
if not result:
|
||||
return False
|
||||
|
||||
async def update_domain_interval(now):
|
||||
"""Update the namecheap DNS entry."""
|
||||
await update_namecheapdns(session, host, domain, password)
|
||||
await _update_namecheapdns(session, host, domain, password)
|
||||
|
||||
entry.async_on_unload(
|
||||
async_track_time_interval(hass, update_domain_interval, INTERVAL)
|
||||
)
|
||||
async_track_time_interval(hass, update_domain_interval, INTERVAL)
|
||||
|
||||
return True
|
||||
return result
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: NamecheapConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return True
|
||||
|
||||
|
||||
async def update_namecheapdns(
session: ClientSession, host: str, domain: str, password: str
):
async def _update_namecheapdns(session, host, domain, password):
"""Update namecheap DNS entry."""
params = {"host": host, "domain": domain, "password": password}

resp = await session.get(UPDATE_URL, params=params)
xml_string = await resp.text()
root = ET.fromstring(xml_string)
err_count = root.find("ErrCount").text

if "<ErrCount>0</ErrCount>" not in xml_string:
if int(err_count) != 0:
_LOGGER.warning("Updating namecheap domain failed: %s", domain)
return False
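Both versions test the same field; the rewritten one parses it instead of substring matching. A sketch with an assumed response shape (the real endpoint may wrap ErrCount differently):

import defusedxml.ElementTree as ET

SAMPLE = "<interface-response><ErrCount>0</ErrCount></interface-response>"  # assumed shape

root = ET.fromstring(SAMPLE)
err_count = int(root.find("ErrCount").text)
assert err_count == 0  # a non-zero count means the dynamic DNS update failed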
|
||||
|
||||
|
||||
@@ -1,91 +0,0 @@
|
||||
"""Config flow for the Namecheap DynamicDNS integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_PASSWORD
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from . import update_namecheapdns
|
||||
from .const import DOMAIN
|
||||
from .issue import deprecate_yaml_issue
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default="@"): cv.string,
|
||||
vol.Required(CONF_DOMAIN): cv.string,
|
||||
vol.Required(CONF_PASSWORD): TextSelector(
|
||||
TextSelectorConfig(
|
||||
type=TextSelectorType.PASSWORD, autocomplete="current-password"
|
||||
)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class NamecheapDnsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Namecheap DynamicDNS."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match(
|
||||
{CONF_HOST: user_input[CONF_HOST], CONF_DOMAIN: user_input[CONF_DOMAIN]}
|
||||
)
|
||||
session = async_get_clientsession(self.hass)
|
||||
try:
|
||||
if not await update_namecheapdns(session, **user_input):
|
||||
errors["base"] = "update_failed"
|
||||
except ClientError:
|
||||
_LOGGER.debug("Cannot connect", exc_info=True)
|
||||
errors["base"] = "cannot_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=f"{user_input[CONF_HOST]}.{user_input[CONF_DOMAIN]}",
|
||||
data=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA, suggested_values=user_input
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={"account_panel": "https://ap.www.namecheap.com/"},
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import config from yaml."""
|
||||
|
||||
self._async_abort_entries_match(
|
||||
{CONF_HOST: import_info[CONF_HOST], CONF_DOMAIN: import_info[CONF_DOMAIN]}
|
||||
)
|
||||
result = await self.async_step_user(import_info)
|
||||
if errors := result.get("errors"):
|
||||
deprecate_yaml_issue(self.hass, import_success=False)
|
||||
return self.async_abort(reason=errors["base"])
|
||||
|
||||
deprecate_yaml_issue(self.hass, import_success=True)
|
||||
return result
|
||||
@@ -1,6 +0,0 @@
|
||||
"""Constants for the Namecheap DynamicDNS integration."""
|
||||
|
||||
DOMAIN = "namecheapdns"
|
||||
|
||||
|
||||
UPDATE_URL = "https://dynamicdns.park-your-domain.com/update"
|
||||
@@ -1,40 +0,0 @@
|
||||
"""Issues for Namecheap DynamicDNS integration."""
|
||||
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
@callback
|
||||
def deprecate_yaml_issue(hass: HomeAssistant, *, import_success: bool) -> None:
|
||||
"""Deprecate yaml issue."""
|
||||
if import_success:
|
||||
async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
breaks_in_ha_version="2026.8.0",
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Namecheap DynamicDNS",
|
||||
},
|
||||
)
|
||||
else:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml_import_issue_error",
|
||||
breaks_in_ha_version="2026.8.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml_import_issue_error",
|
||||
translation_placeholders={
|
||||
"url": f"/config/integrations/dashboard/add?domain={DOMAIN}"
|
||||
},
|
||||
)
|
||||
@@ -1,10 +1,9 @@
|
||||
{
|
||||
"domain": "namecheapdns",
|
||||
"name": "Namecheap DynamicDNS",
|
||||
"codeowners": ["@tr4nt0r"],
|
||||
"config_flow": true,
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/namecheapdns",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_push",
|
||||
"requirements": []
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["defusedxml==0.7.1"]
|
||||
}
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"update_failed": "Updating DNS failed"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"domain": "[%key:common::config_flow::data::username%]",
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "Dynamic DNS password"
|
||||
},
|
||||
"data_description": {
|
||||
"domain": "The domain to update ('example.com')",
|
||||
"host": "The host to update ('home' for home.example.com). Use '@' to update the root domain",
|
||||
"password": "Dynamic DNS password for the domain"
|
||||
},
|
||||
"description": "Enter your Namecheap DynamicDNS domain and password below to configure dynamic DNS updates. You can find the Dynamic DNS password in your [Namecheap account]({account_panel}) under Domain List > Manage > Advanced DNS > Dynamic DNS."
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"connection_error": {
|
||||
"message": "Updating Namecheap DynamicDNS domain {domain} failed due to a connection error"
|
||||
},
|
||||
"update_failed": {
|
||||
"message": "Updating Namecheap DynamicDNS domain {domain} failed"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml_import_issue_error": {
|
||||
"description": "Configuring Namecheap DynamicDNS using YAML is being removed but there was an error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Namecheap DynamicDNS YAML configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
|
||||
"title": "The Namecheap DynamicDNS YAML configuration import failed"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -21,7 +21,6 @@ from .nasweb_data import NASwebData
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
Platform.CLIMATE,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
@@ -1,168 +0,0 @@
|
||||
"""Platform for NASweb thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
from webio_api import Thermostat as NASwebThermostat
|
||||
from webio_api.const import KEY_THERMOSTAT
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.components.sensor import SensorDeviceClass
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import DiscoveryInfoType
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
BaseCoordinatorEntity,
|
||||
BaseDataUpdateCoordinatorProtocol,
|
||||
)
|
||||
|
||||
from . import NASwebConfigEntry
|
||||
from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL
|
||||
|
||||
CLIMATE_TRANSLATION_KEY = "thermostat"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config: NASwebConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up Climate platform."""
|
||||
coordinator = config.runtime_data
|
||||
nasweb_thermostat: NASwebThermostat = coordinator.data[KEY_THERMOSTAT]
|
||||
climate = Thermostat(coordinator, nasweb_thermostat)
|
||||
async_add_entities([climate])
|
||||
|
||||
|
||||
class Thermostat(ClimateEntity, BaseCoordinatorEntity):
|
||||
"""Entity representing NASweb thermostat."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.TEMPERATURE
|
||||
_attr_has_entity_name = True
|
||||
_attr_hvac_modes = [
|
||||
HVACMode.OFF,
|
||||
HVACMode.HEAT,
|
||||
HVACMode.COOL,
|
||||
HVACMode.HEAT_COOL,
|
||||
HVACMode.FAN_ONLY,
|
||||
]
|
||||
_attr_max_temp = 50
|
||||
_attr_min_temp = -50
|
||||
_attr_precision = 1.0
|
||||
_attr_should_poll = False
|
||||
_attr_supported_features = ClimateEntityFeature(
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
|
||||
)
|
||||
_attr_target_temperature_step = 1.0
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_translation_key = CLIMATE_TRANSLATION_KEY
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: BaseDataUpdateCoordinatorProtocol,
|
||||
nasweb_thermostat: NASwebThermostat,
|
||||
) -> None:
|
||||
"""Initialize Thermostat."""
|
||||
super().__init__(coordinator)
|
||||
self._thermostat = nasweb_thermostat
|
||||
self._attr_available = False
|
||||
self._attr_name = nasweb_thermostat.name
|
||||
self._attr_unique_id = f"{DOMAIN}.{self._thermostat.webio_serial}.thermostat"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._thermostat.webio_serial)}
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self._handle_coordinator_update()
|
||||
|
||||
def _set_attr_available(
|
||||
self, entity_last_update: float, available: bool | None
|
||||
) -> None:
|
||||
if (
|
||||
self.coordinator.last_update is None
|
||||
or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
|
||||
):
|
||||
self._attr_available = False
|
||||
else:
|
||||
self._attr_available = available if available is not None else False
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._attr_current_temperature = self._thermostat.current_temp
|
||||
self._attr_target_temperature_low = self._thermostat.temp_target_min
|
||||
self._attr_target_temperature_high = self._thermostat.temp_target_max
|
||||
self._attr_hvac_mode = self._get_current_hvac_mode()
|
||||
self._attr_hvac_action = self._get_current_action()
|
||||
self._attr_name = self._thermostat.name if self._thermostat.name else None
|
||||
self._set_attr_available(
|
||||
self._thermostat.last_update, self._thermostat.available
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _get_current_hvac_mode(self) -> HVACMode:
|
||||
have_cooling = self._thermostat.enabled_above_output
|
||||
have_heating = self._thermostat.enabled_below_output
|
||||
if have_cooling and have_heating:
|
||||
return HVACMode.HEAT_COOL
|
||||
if have_cooling:
|
||||
return HVACMode.COOL
|
||||
if have_heating:
|
||||
return HVACMode.HEAT
|
||||
if self._thermostat.enabled_inrange_output:
|
||||
return HVACMode.FAN_ONLY
|
||||
return HVACMode.OFF
|
||||
|
||||
def _get_current_action(self) -> HVACAction:
if self._thermostat.current_temp is None:
return HVACAction.OFF
if (
self._thermostat.temp_target_min is not None
and self._thermostat.current_temp < self._thermostat.temp_target_min
and self._thermostat.enabled_below_output
):
return HVACAction.HEATING
if (
self._thermostat.temp_target_max is not None
and self._thermostat.current_temp > self._thermostat.temp_target_max
and self._thermostat.enabled_above_output
):
return HVACAction.COOLING
if (
self._thermostat.temp_target_min is not None
and self._thermostat.temp_target_max is not None
and self._thermostat.current_temp >= self._thermostat.temp_target_min
and self._thermostat.current_temp <= self._thermostat.temp_target_max
and self._thermostat.enabled_inrange_output
):
return HVACAction.FAN
return HVACAction.IDLE
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the entity.
|
||||
|
||||
Only used by the generic entity update service.
|
||||
Scheduling updates is not necessary, the coordinator takes care of updates via push notifications.
|
||||
"""
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set HVACMode for Thermostat."""
|
||||
await self._thermostat.set_hvac_mode(hvac_mode)
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set temperature range for Thermostat."""
|
||||
await self._thermostat.set_temperature(
|
||||
kwargs["target_temp_low"], kwargs["target_temp_high"]
|
||||
)
|
||||
@@ -23,7 +23,6 @@ _LOGGER = logging.getLogger(__name__)

KEY_INPUTS = "inputs"
KEY_OUTPUTS = "outputs"
KEY_THERMOSTAT = "thermostat"
KEY_ZONES = "zones"


@@ -105,7 +104,6 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
            KEY_OUTPUTS: self.webio_api.outputs,
            KEY_INPUTS: self.webio_api.inputs,
            KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
            KEY_THERMOSTAT: self.webio_api.thermostat,
            KEY_ZONES: self.webio_api.zones,
        }
        self.async_set_updated_data(data)
@@ -201,7 +199,6 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
            KEY_OUTPUTS: self.webio_api.outputs,
            KEY_INPUTS: self.webio_api.inputs,
            KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
            KEY_THERMOSTAT: self.webio_api.thermostat,
            KEY_ZONES: self.webio_api.zones,
        }
        self.async_set_updated_data(new_data)
@@ -29,11 +29,6 @@
        "name": "Zone {index}"
      }
    },
    "climate": {
      "thermostat": {
        "name": "[%key:component::climate::entity_component::_::name%]"
      }
    },
    "sensor": {
      "sensor_input": {
        "name": "Input {index}",
@@ -4,35 +4,27 @@ from __future__ import annotations

from openevsehttp.__main__ import OpenEVSE

from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryError

from .coordinator import OpenEVSEConfigEntry, OpenEVSEDataUpdateCoordinator
type OpenEVSEConfigEntry = ConfigEntry[OpenEVSE]


async def async_setup_entry(hass: HomeAssistant, entry: OpenEVSEConfigEntry) -> bool:
    """Set up OpenEVSE from a config entry."""
    charger = OpenEVSE(
        entry.data[CONF_HOST],
        entry.data.get(CONF_USERNAME),
        entry.data.get(CONF_PASSWORD),
    )
    """Set up openevse from a config entry."""

    entry.runtime_data = OpenEVSE(entry.data[CONF_HOST])
    try:
        await charger.test_and_get()
        await entry.runtime_data.test_and_get()
    except TimeoutError as ex:
        raise ConfigEntryNotReady("Unable to connect to charger") from ex

    coordinator = OpenEVSEDataUpdateCoordinator(hass, entry, charger)
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator
        raise ConfigEntryError("Unable to connect to charger") from ex

    await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR])
    return True


async def async_unload_entry(hass: HomeAssistant, entry: OpenEVSEConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, [Platform.SENSOR])
@@ -3,22 +3,14 @@
from typing import Any

from openevsehttp.__main__ import OpenEVSE
from openevsehttp.exceptions import AuthenticationError, MissingSerial
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import config_validation as cv
from homeassistant.const import CONF_HOST, CONF_NAME
from homeassistant.helpers.service_info import zeroconf

from .const import CONF_ID, CONF_SERIAL, DOMAIN

USER_SCHEMA = vol.Schema({vol.Required(CONF_HOST): cv.string})

AUTH_SCHEMA = vol.Schema(
    {vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)


class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
    """OpenEVSE config flow."""
@@ -29,49 +21,39 @@ class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
    def __init__(self) -> None:
        """Set up the instance."""
        self.discovery_info: dict[str, Any] = {}
        self._host: str | None = None

    async def check_status(
        self, host: str, user: str | None = None, password: str | None = None
    ) -> tuple[dict[str, str], str | None]:
    async def check_status(self, host: str) -> tuple[bool, str | None]:
        """Check if we can connect to the OpenEVSE charger."""

        charger = OpenEVSE(host, user, password)
        charger = OpenEVSE(host)
        try:
            result = await charger.test_and_get()
        except TimeoutError:
            return {"base": "cannot_connect"}, None
        except AuthenticationError:
            return {"base": "invalid_auth"}, None
        except MissingSerial:
            return {}, None
        return {}, result.get(CONF_SERIAL)
            return False, None
        return True, result.get(CONF_SERIAL)

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""

        errors: dict[str, str] = {}
        errors = {}
        if user_input is not None:
            self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
            errors, serial = await self.check_status(user_input[CONF_HOST])

            if not errors:
                if serial is not None:
            if (result := await self.check_status(user_input[CONF_HOST]))[0]:
                if (serial := result[1]) is not None:
                    await self.async_set_unique_id(serial, raise_on_progress=False)
                    self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=f"OpenEVSE {user_input[CONF_HOST]}",
                    data=user_input,
                )
            if errors["base"] == "invalid_auth":
                self._host = user_input[CONF_HOST]
                return await self.async_step_auth()
            errors = {CONF_HOST: "cannot_connect"}

        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(USER_SCHEMA, user_input),
            data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
            errors=errors,
        )

@@ -79,10 +61,9 @@ class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
        """Handle the initial step."""

        self._async_abort_entries_match({CONF_HOST: data[CONF_HOST]})
        errors, serial = await self.check_status(data[CONF_HOST])

        if not errors:
            if serial is not None:
        if (result := await self.check_status(data[CONF_HOST]))[0]:
            if (serial := result[1]) is not None:
                await self.async_set_unique_id(serial)
                self._abort_if_unique_id_configured()
        else:
@@ -111,20 +92,17 @@ class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
            }
        )
        self.context.update({"title_placeholders": {"name": name}})

        if not (await self.check_status(host))[0]:
            return self.async_abort(reason="cannot_connect")

        return await self.async_step_discovery_confirm()

    async def async_step_discovery_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm discovery."""
        errors, _ = await self.check_status(self.discovery_info[CONF_HOST])
        if errors:
            if errors["base"] == "invalid_auth":
                return await self.async_step_auth()
            return self.async_abort(reason="unavailable_host")

        if user_input is None:
            self._set_confirm_only()
            return self.async_show_form(
                step_id="discovery_confirm",
                description_placeholders={"name": self.discovery_info[CONF_NAME]},
@@ -134,36 +112,3 @@ class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
            title=self.discovery_info[CONF_NAME],
            data={CONF_HOST: self.discovery_info[CONF_HOST]},
        )

    async def async_step_auth(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the authentication step."""

        errors: dict[str, str] = {}
        if user_input is not None:
            host = self._host or self.discovery_info[CONF_HOST]
            errors, serial = await self.check_status(
                host,
                user_input[CONF_USERNAME],
                user_input[CONF_PASSWORD],
            )

            if not errors:
                if self.unique_id is None and serial is not None:
                    await self.async_set_unique_id(serial)
                    self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=f"OpenEVSE {host}",
                    data={
                        CONF_HOST: host,
                        CONF_USERNAME: user_input[CONF_USERNAME],
                        CONF_PASSWORD: user_input[CONF_PASSWORD],
                    },
                )

        return self.async_show_form(
            step_id="auth",
            data_schema=self.add_suggested_values_to_schema(AUTH_SCHEMA, user_input),
            errors=errors,
        )
@@ -1,51 +0,0 @@
"""Data update coordinator for OpenEVSE."""

from __future__ import annotations

from datetime import timedelta
import logging

from openevsehttp.__main__ import OpenEVSE

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

SCAN_INTERVAL = timedelta(seconds=30)

type OpenEVSEConfigEntry = ConfigEntry[OpenEVSEDataUpdateCoordinator]


class OpenEVSEDataUpdateCoordinator(DataUpdateCoordinator[None]):
    """Class to manage fetching OpenEVSE data."""

    config_entry: OpenEVSEConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: OpenEVSEConfigEntry,
        charger: OpenEVSE,
    ) -> None:
        """Initialize coordinator."""
        self.charger = charger
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )

    async def _async_update_data(self) -> None:
        """Fetch data from OpenEVSE charger."""
        try:
            await self.charger.update()
        except TimeoutError as error:
            raise UpdateFailed(
                f"Timeout communicating with charger: {error}"
            ) from error
@@ -2,8 +2,6 @@

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
import logging

from openevsehttp.__main__ import OpenEVSE
@@ -35,82 +33,61 @@ from homeassistant.helpers.entity_platform import (
    AddConfigEntryEntitiesCallback,
    AddEntitiesCallback,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import ConfigEntry
from .const import DOMAIN, INTEGRATION_TITLE
from .coordinator import OpenEVSEConfigEntry, OpenEVSEDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class OpenEVSESensorDescription(SensorEntityDescription):
    """Describes an OpenEVSE sensor entity."""

    value_fn: Callable[[OpenEVSE], str | float | None]


SENSOR_TYPES: tuple[OpenEVSESensorDescription, ...] = (
    OpenEVSESensorDescription(
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
    SensorEntityDescription(
        key="status",
        translation_key="status",
        value_fn=lambda ev: ev.status,
    ),
    OpenEVSESensorDescription(
    SensorEntityDescription(
        key="charge_time",
        translation_key="charge_time",
        native_unit_of_measurement=UnitOfTime.SECONDS,
        suggested_unit_of_measurement=UnitOfTime.MINUTES,
        native_unit_of_measurement=UnitOfTime.MINUTES,
        device_class=SensorDeviceClass.DURATION,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda ev: ev.charge_time_elapsed,
    ),
    OpenEVSESensorDescription(
    SensorEntityDescription(
        key="ambient_temp",
        translation_key="ambient_temp",
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda ev: ev.ambient_temperature,
    ),
    OpenEVSESensorDescription(
    SensorEntityDescription(
        key="ir_temp",
        translation_key="ir_temp",
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda ev: ev.ir_temperature,
        entity_registry_enabled_default=False,
    ),
    OpenEVSESensorDescription(
    SensorEntityDescription(
        key="rtc_temp",
        translation_key="rtc_temp",
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda ev: ev.rtc_temperature,
        entity_registry_enabled_default=False,
    ),
    OpenEVSESensorDescription(
    SensorEntityDescription(
        key="usage_session",
        translation_key="usage_session",
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL_INCREASING,
        value_fn=lambda ev: ev.usage_session,
    ),
    OpenEVSESensorDescription(
    SensorEntityDescription(
        key="usage_total",
        translation_key="usage_total",
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL_INCREASING,
        value_fn=lambda ev: ev.usage_total,
    ),
)

@@ -177,34 +154,41 @@ async def async_setup_platform(

async def async_setup_entry(
    hass: HomeAssistant,
    entry: OpenEVSEConfigEntry,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up OpenEVSE sensors based on config entry."""
    coordinator = entry.runtime_data
    identifier = entry.unique_id or entry.entry_id
    """Add sensors for passed config_entry in HA."""
    async_add_entities(
        OpenEVSESensor(coordinator, description, identifier, entry.unique_id)
        for description in SENSOR_TYPES
        (
            OpenEVSESensor(
                config_entry.runtime_data,
                description,
                config_entry.entry_id,
                config_entry.unique_id,
            )
            for description in SENSOR_TYPES
        ),
        True,
    )


class OpenEVSESensor(CoordinatorEntity[OpenEVSEDataUpdateCoordinator], SensorEntity):
class OpenEVSESensor(SensorEntity):
    """Implementation of an OpenEVSE sensor."""

    _attr_has_entity_name = True
    entity_description: OpenEVSESensorDescription

    def __init__(
        self,
        coordinator: OpenEVSEDataUpdateCoordinator,
        description: OpenEVSESensorDescription,
        identifier: str,
        charger: OpenEVSE,
        description: SensorEntityDescription,
        entry_id: str,
        unique_id: str | None,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self.entity_description = description
        self.charger = charger

        identifier = unique_id or entry_id
        self._attr_unique_id = f"{identifier}-{description.key}"

        self._attr_device_info = DeviceInfo(
@@ -217,7 +201,28 @@ class OpenEVSESensor(CoordinatorEntity[OpenEVSEDataUpdateCoordinator], SensorEnt
            }
            self._attr_device_info[ATTR_SERIAL_NUMBER] = unique_id

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        return self.entity_description.value_fn(self.coordinator.charger)
    async def async_update(self) -> None:
        """Get the monitored data from the charger."""
        try:
            await self.charger.update()
        except TimeoutError:
            _LOGGER.warning("Could not update status for %s", self.name)
            return

        sensor_type = self.entity_description.key
        if sensor_type == "status":
            self._attr_native_value = self.charger.status
        elif sensor_type == "charge_time":
            self._attr_native_value = self.charger.charge_time_elapsed / 60
        elif sensor_type == "ambient_temp":
            self._attr_native_value = self.charger.ambient_temperature
        elif sensor_type == "ir_temp":
            self._attr_native_value = self.charger.ir_temperature
        elif sensor_type == "rtc_temp":
            self._attr_native_value = self.charger.rtc_temperature
        elif sensor_type == "usage_session":
            self._attr_native_value = float(self.charger.usage_session) / 1000
        elif sensor_type == "usage_total":
            self._attr_native_value = float(self.charger.usage_total) / 1000
        else:
            self._attr_native_value = "Unknown"
@@ -5,20 +5,9 @@
      "unavailable_host": "Unable to connect to host"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
      "cannot_connect": "Unable to connect"
    },
    "step": {
      "auth": {
        "data": {
          "password": "[%key:common::config_flow::data::password%]",
          "username": "[%key:common::config_flow::data::username%]"
        },
        "data_description": {
          "password": "The password to access your OpenEVSE charger",
          "username": "The username to access your OpenEVSE charger"
        }
      },
      "user": {
        "data": {
          "host": "[%key:common::config_flow::data::host%]"
@@ -6,7 +6,6 @@ import base64
from http import HTTPStatus
import logging
import mimetypes
from typing import Any

import requests
from requests.auth import HTTPBasicAuth
@@ -66,23 +65,26 @@ def get_service(
    discovery_info: DiscoveryInfoType | None = None,
) -> PushsaferNotificationService:
    """Get the Pushsafer.com notification service."""
    return PushsaferNotificationService(config[CONF_DEVICE_KEY])
    return PushsaferNotificationService(
        config.get(CONF_DEVICE_KEY), hass.config.is_allowed_path
    )


class PushsaferNotificationService(BaseNotificationService):
    """Implementation of the notification service for Pushsafer.com."""

    def __init__(self, private_key: str) -> None:
    def __init__(self, private_key, is_allowed_path):
        """Initialize the service."""
        self._private_key = private_key
        self.is_allowed_path = is_allowed_path

    def send_message(self, message: str = "", **kwargs: Any) -> None:
    def send_message(self, message="", **kwargs):
        """Send a message to specified target."""
        targets: list[str] | None
        if (targets := kwargs.get(ATTR_TARGET)) is None:
        if kwargs.get(ATTR_TARGET) is None:
            targets = ["a"]
            _LOGGER.debug("No target specified. Sending push to all")
        else:
            targets = kwargs.get(ATTR_TARGET)
            _LOGGER.debug("%s target(s) specified", len(targets))

        title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
@@ -168,7 +170,7 @@ class PushsaferNotificationService(BaseNotificationService):
        try:
            if local_path is not None:
                _LOGGER.debug("Loading image from local path")
                if self.hass.config.is_allowed_path(local_path):
                if self.is_allowed_path(local_path):
                    file_mimetype = mimetypes.guess_type(local_path)
                    _LOGGER.debug("Detected mimetype %s", file_mimetype)
                    with open(local_path, "rb") as binary_file:
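The Pushsafer change above injects hass.config.is_allowed_path into the service as a plain callable instead of reaching through self.hass at send time. A minimal sketch of what that enables; the stub below is hypothetical and assumes only the constructor shown above:

# Hypothetical sketch: any callable with the same shape can stand in for the
# HomeAssistant path check, so the service is testable without a hass instance.
def allow_all(path: str) -> bool:
    # Permissive stand-in for hass.config.is_allowed_path (test use only).
    return True

service = PushsaferNotificationService("PRIVATE_KEY", allow_all)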
@@ -21,5 +21,5 @@
  "documentation": "https://www.home-assistant.io/integrations/qingping",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["qingping-ble==1.1.0"]
  "requirements": ["qingping-ble==1.0.1"]
}
@@ -33,7 +33,7 @@ from .const import (
from .coordinator import TibberDataAPICoordinator
from .services import async_setup_services

PLATFORMS = [Platform.BINARY_SENSOR, Platform.NOTIFY, Platform.SENSOR]
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -1,123 +0,0 @@
"""Support for Tibber binary sensors."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
import logging

import tibber
from tibber.data_api import TibberDevice

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, TibberConfigEntry
from .coordinator import TibberDataAPICoordinator

_LOGGER = logging.getLogger(__name__)


@dataclass(frozen=True, kw_only=True)
class TibberBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Describes Tibber binary sensor entity."""

    is_on_fn: Callable[[str], bool | None]


DATA_API_BINARY_SENSORS: tuple[TibberBinarySensorEntityDescription, ...] = (
    TibberBinarySensorEntityDescription(
        key="connector.status",
        device_class=BinarySensorDeviceClass.PLUG,
        is_on_fn={"connected": True, "disconnected": False}.get,
    ),
    TibberBinarySensorEntityDescription(
        key="charging.status",
        device_class=BinarySensorDeviceClass.BATTERY_CHARGING,
        is_on_fn={"charging": True, "idle": False}.get,
    ),
    TibberBinarySensorEntityDescription(
        key="onOff",
        device_class=BinarySensorDeviceClass.POWER,
        is_on_fn={"on": True, "off": False}.get,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: TibberConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Tibber binary sensors."""
    coordinator = entry.runtime_data.data_api_coordinator
    assert coordinator is not None

    entities: list[TibberDataAPIBinarySensor] = []
    api_binary_sensors = {sensor.key: sensor for sensor in DATA_API_BINARY_SENSORS}

    for device in coordinator.data.values():
        for sensor in device.sensors:
            description: TibberBinarySensorEntityDescription | None = (
                api_binary_sensors.get(sensor.id)
            )
            if description is None:
                continue
            entities.append(TibberDataAPIBinarySensor(coordinator, device, description))
    async_add_entities(entities)


class TibberDataAPIBinarySensor(
    CoordinatorEntity[TibberDataAPICoordinator], BinarySensorEntity
):
    """Representation of a Tibber Data API binary sensor."""

    _attr_has_entity_name = True
    entity_description: TibberBinarySensorEntityDescription

    def __init__(
        self,
        coordinator: TibberDataAPICoordinator,
        device: TibberDevice,
        entity_description: TibberBinarySensorEntityDescription,
    ) -> None:
        """Initialize the binary sensor."""
        super().__init__(coordinator)

        self._device_id: str = device.id
        self.entity_description = entity_description

        self._attr_unique_id = f"{device.id}_{entity_description.key}"

        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device.external_id)},
            name=device.name,
            manufacturer=device.brand,
            model=device.model,
        )

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return (
            super().available and self._device_id in self.coordinator.sensors_by_device
        )

    @property
    def device(self) -> dict[str, tibber.data_api.Sensor]:
        """Return the device sensors."""
        return self.coordinator.sensors_by_device[self._device_id]

    @property
    def is_on(self) -> bool | None:
        """Return the state of the binary sensor."""
        return self.entity_description.is_on_fn(
            str(self.device[self.entity_description.key].value)
        )
@@ -34,7 +34,7 @@ from homeassistant.const import (
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
@@ -351,6 +351,7 @@ async def _async_setup_graphql_sensors(
    tibber_connection = entry.runtime_data.tibber_connection

    entity_registry = er.async_get(hass)
    device_registry = dr.async_get(hass)

    coordinator: TibberDataCoordinator | None = None
    entities: list[TibberSensor] = []
@@ -390,6 +391,25 @@ async def _async_setup_graphql_sensors(
            ).async_set_updated_data
        )

        # migrate
        old_id = home.info["viewer"]["home"]["meteringPointData"]["consumptionEan"]
        if old_id is None:
            continue

        # migrate to new device ids
        old_entity_id = entity_registry.async_get_entity_id("sensor", DOMAIN, old_id)
        if old_entity_id is not None:
            entity_registry.async_update_entity(
                old_entity_id, new_unique_id=home.home_id
            )

        # migrate to new device ids
        device_entry = device_registry.async_get_device(identifiers={(DOMAIN, old_id)})
        if device_entry and entry.entry_id in device_entry.config_entries:
            device_registry.async_update_device(
                device_entry.id, new_identifiers={(DOMAIN, home.home_id)}
            )

    async_add_entities(entities)


@@ -410,6 +430,9 @@ def _setup_data_api_sensors(
        for sensor in device.sensors:
            description: SensorEntityDescription | None = api_sensors.get(sensor.id)
            if description is None:
                _LOGGER.debug(
                    "Sensor %s not found in DATA_API_SENSORS, skipping", sensor
                )
                continue
            entities.append(TibberDataAPISensor(coordinator, device, description))
    async_add_entities(entities)
@@ -38,7 +38,6 @@ from .models import (
    DPCodeEnumWrapper,
    DPCodeIntegerWrapper,
)
from .type_information import EnumTypeInformation

TUYA_HVAC_TO_HA = {
    "auto": HVACMode.HEAT_COOL,
@@ -140,58 +139,6 @@ class _SwingModeWrapper(DeviceWrapper):
        return commands


class _HvacModeWrapper(DPCodeEnumWrapper):
    """Wrapper for managing climate HVACMode."""

    # Modes that do not map to HVAC modes are ignored (they are handled by PresetWrapper)

    def __init__(self, dpcode: str, type_information: EnumTypeInformation) -> None:
        """Init _HvacModeWrapper."""
        super().__init__(dpcode, type_information)
        self.options = [
            TUYA_HVAC_TO_HA[tuya_mode]
            for tuya_mode in type_information.range
            if tuya_mode in TUYA_HVAC_TO_HA
        ]

    def read_device_status(self, device: CustomerDevice) -> HVACMode | None:
        """Read the device status."""
        if (raw := super().read_device_status(device)) not in TUYA_HVAC_TO_HA:
            return None
        return TUYA_HVAC_TO_HA[raw]

    def _convert_value_to_raw_value(
        self, device: CustomerDevice, value: HVACMode
    ) -> Any:
        """Convert value to raw value."""
        return next(
            tuya_mode
            for tuya_mode, ha_mode in TUYA_HVAC_TO_HA.items()
            if ha_mode == value
        )


class _PresetWrapper(DPCodeEnumWrapper):
    """Wrapper for managing climate preset modes."""

    # Modes that map to HVAC modes are ignored (they are handled by HVACModeWrapper)

    def __init__(self, dpcode: str, type_information: EnumTypeInformation) -> None:
        """Init _PresetWrapper."""
        super().__init__(dpcode, type_information)
        self.options = [
            tuya_mode
            for tuya_mode in type_information.range
            if tuya_mode not in TUYA_HVAC_TO_HA
        ]

    def read_device_status(self, device: CustomerDevice) -> str | None:
        """Read the device status."""
        if (raw := super().read_device_status(device)) in TUYA_HVAC_TO_HA:
            return None
        return raw
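As the comments in _HvacModeWrapper and _PresetWrapper note, this removed design partitions a device's enum range: options with a TUYA_HVAC_TO_HA mapping become HVAC modes, the rest become presets. A minimal, self-contained sketch of that partition, using a simplified stand-in mapping (illustration only, not code from this changeset):

# Illustration only: how one Tuya enum range splits into HVAC modes and presets.
SIMPLIFIED_TUYA_HVAC_TO_HA = {"auto": "heat_cool", "cold": "cool", "hot": "heat"}

def split_modes(enum_range: list[str]) -> tuple[list[str], list[str]]:
    """Partition Tuya enum options into HA HVAC modes and preset names."""
    hvac_modes = [
        SIMPLIFIED_TUYA_HVAC_TO_HA[m]
        for m in enum_range
        if m in SIMPLIFIED_TUYA_HVAC_TO_HA
    ]
    presets = [m for m in enum_range if m not in SIMPLIFIED_TUYA_HVAC_TO_HA]
    return hvac_modes, presets

# split_modes(["auto", "cold", "eco", "sleep"]) == (["heat_cool", "cool"], ["eco", "sleep"])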

@dataclass(frozen=True, kw_only=True)
class TuyaClimateEntityDescription(ClimateEntityDescription):
    """Describe a Tuya climate entity."""
@@ -349,10 +296,7 @@ async def async_setup_entry(
        (DPCode.FAN_SPEED_ENUM, DPCode.LEVEL, DPCode.WINDSPEED),
        prefer_function=True,
    ),
    hvac_mode_wrapper=_HvacModeWrapper.find_dpcode(
        device, DPCode.MODE, prefer_function=True
    ),
    preset_wrapper=_PresetWrapper.find_dpcode(
    hvac_mode_wrapper=DPCodeEnumWrapper.find_dpcode(
        device, DPCode.MODE, prefer_function=True
    ),
    set_temperature_wrapper=temperature_wrappers[1],
@@ -378,6 +322,7 @@ async def async_setup_entry(
class TuyaClimateEntity(TuyaEntity, ClimateEntity):
    """Tuya Climate Device."""

    _hvac_to_tuya: dict[str, str]
    entity_description: TuyaClimateEntityDescription
    _attr_name = None

@@ -390,8 +335,7 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
        current_humidity_wrapper: DeviceWrapper[int] | None,
        current_temperature_wrapper: DeviceWrapper[float] | None,
        fan_mode_wrapper: DeviceWrapper[str] | None,
        hvac_mode_wrapper: DeviceWrapper[HVACMode] | None,
        preset_wrapper: DeviceWrapper[str] | None,
        hvac_mode_wrapper: DeviceWrapper[str] | None,
        set_temperature_wrapper: DeviceWrapper[float] | None,
        swing_wrapper: DeviceWrapper[str] | None,
        switch_wrapper: DeviceWrapper[bool] | None,
@@ -407,7 +351,6 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
        self._current_temperature = current_temperature_wrapper
        self._fan_mode_wrapper = fan_mode_wrapper
        self._hvac_mode_wrapper = hvac_mode_wrapper
        self._preset_wrapper = preset_wrapper
        self._set_temperature = set_temperature_wrapper
        self._swing_wrapper = swing_wrapper
        self._switch_wrapper = switch_wrapper
@@ -423,24 +366,29 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
            self._attr_target_temperature_step = set_temperature_wrapper.value_step

        # Determine HVAC modes
        self._attr_hvac_modes = []
        self._attr_hvac_modes: list[HVACMode] = []
        self._hvac_to_tuya = {}
        if hvac_mode_wrapper:
            self._attr_hvac_modes = [HVACMode.OFF]
            for mode in hvac_mode_wrapper.options:
                self._attr_hvac_modes.append(HVACMode(mode))
            unknown_hvac_modes: list[str] = []
            for tuya_mode in hvac_mode_wrapper.options:
                if tuya_mode in TUYA_HVAC_TO_HA:
                    ha_mode = TUYA_HVAC_TO_HA[tuya_mode]
                    self._hvac_to_tuya[ha_mode] = tuya_mode
                    self._attr_hvac_modes.append(ha_mode)
                else:
                    unknown_hvac_modes.append(tuya_mode)

            if unknown_hvac_modes:  # Tuya modes are presets instead of hvac_modes
                self._attr_hvac_modes.append(description.switch_only_hvac_mode)
                self._attr_preset_modes = unknown_hvac_modes
                self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE
        elif switch_wrapper:
            self._attr_hvac_modes = [
                HVACMode.OFF,
                description.switch_only_hvac_mode,
            ]

        # Determine preset modes (ignore if empty options)
        if preset_wrapper and preset_wrapper.options:
            self._attr_hvac_modes.append(description.switch_only_hvac_mode)
            self._attr_preset_modes = preset_wrapper.options
            self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE

        # Determine dpcode to use for setting the humidity
        if target_humidity_wrapper:
            self._attr_supported_features |= ClimateEntityFeature.TARGET_HUMIDITY
@@ -471,15 +419,17 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
                self.device, hvac_mode != HVACMode.OFF
            )
        )
        if self._hvac_mode_wrapper and hvac_mode in self._hvac_mode_wrapper.options:
        if self._hvac_mode_wrapper and hvac_mode in self._hvac_to_tuya:
            commands.extend(
                self._hvac_mode_wrapper.get_update_commands(self.device, hvac_mode)
                self._hvac_mode_wrapper.get_update_commands(
                    self.device, self._hvac_to_tuya[hvac_mode]
                )
            )
        await self._async_send_commands(commands)

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new target preset mode."""
        await self._async_send_wrapper_updates(self._preset_wrapper, preset_mode)
        await self._async_send_wrapper_updates(self._hvac_mode_wrapper, preset_mode)

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set new target fan mode."""
@@ -534,12 +484,21 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
            return None

        # If we do have a mode wrapper, check if the mode maps to an HVAC mode.
        return self._read_wrapper(self._hvac_mode_wrapper)
        if (hvac_status := self._read_wrapper(self._hvac_mode_wrapper)) is None:
            return None
        return TUYA_HVAC_TO_HA.get(hvac_status)

    @property
    def preset_mode(self) -> str | None:
        """Return preset mode."""
        return self._read_wrapper(self._preset_wrapper)
        if self._hvac_mode_wrapper is None:
            return None

        mode = self._read_wrapper(self._hvac_mode_wrapper)
        if mode in TUYA_HVAC_TO_HA:
            return None

        return mode

    @property
    def fan_mode(self) -> str | None:
@@ -10,12 +10,7 @@ from .const import _LOGGER, CONF_DEVICE_DETAILS, DEVICE_TYPE, DEVICE_URL
from .coordinator import VodafoneConfigEntry, VodafoneStationRouter
from .utils import async_client_session

PLATFORMS = [
    Platform.BUTTON,
    Platform.DEVICE_TRACKER,
    Platform.IMAGE,
    Platform.SENSOR,
]
PLATFORMS = [Platform.BUTTON, Platform.DEVICE_TRACKER, Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: VodafoneConfigEntry) -> bool:
@@ -54,7 +54,6 @@ class UpdateCoordinatorDataType:

    devices: dict[str, VodafoneStationDeviceInfo]
    sensors: dict[str, Any]
    wifi: dict[str, Any]


class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):
@@ -138,7 +137,6 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):
            await self.api.login()
            raw_data_devices = await self.api.get_devices_data()
            data_sensors = await self.api.get_sensor_data()
            data_wifi = await self.api.get_wifi_data()
            await self.api.logout()
        except exceptions.CannotAuthenticate as err:
            raise ConfigEntryAuthFailed(
@@ -180,7 +178,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]):

        self.previous_devices = current_devices

        return UpdateCoordinatorDataType(data_devices, data_sensors, data_wifi)
        return UpdateCoordinatorDataType(data_devices, data_sensors)

    @property
    def signal_device_new(self) -> str:
@@ -1,87 +0,0 @@
"""Vodafone Station image."""

from __future__ import annotations

from io import BytesIO
from typing import Final, cast

from aiovodafone.const import WIFI_DATA

from homeassistant.components.image import ImageEntity, ImageEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import _LOGGER
from .coordinator import VodafoneConfigEntry, VodafoneStationRouter

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


IMAGE_TYPES: Final = (
    ImageEntityDescription(
        key="guest",
        translation_key="guest",
    ),
    ImageEntityDescription(
        key="guest_5g",
        translation_key="guest_5g",
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: VodafoneConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Guest WiFi QR code for device."""
    _LOGGER.debug("Setting up Vodafone Station images")

    coordinator = entry.runtime_data

    wifi = coordinator.data.wifi

    async_add_entities(
        VodafoneGuestWifiQRImage(hass, coordinator, image_desc)
        for image_desc in IMAGE_TYPES
        if image_desc.key in wifi[WIFI_DATA]
        and "qr_code" in wifi[WIFI_DATA][image_desc.key]
    )


class VodafoneGuestWifiQRImage(
    CoordinatorEntity[VodafoneStationRouter],
    ImageEntity,
):
    """Implementation of the Guest wifi QR code image entity."""

    _attr_content_type = "image/png"
    _attr_entity_category = EntityCategory.DIAGNOSTIC
    _attr_has_entity_name = True

    def __init__(
        self,
        hass: HomeAssistant,
        coordinator: VodafoneStationRouter,
        description: ImageEntityDescription,
    ) -> None:
        """Initialize QR code image entity."""
        super().__init__(coordinator)
        ImageEntity.__init__(self, hass)

        self.entity_description = description
        self._attr_device_info = coordinator.device_info
        self._attr_unique_id = f"{coordinator.serial_number}-{description.key}-qr-code"

    async def async_image(self) -> bytes | None:
        """Return QR code image bytes."""
        qr_code = cast(
            BytesIO,
            self.coordinator.data.wifi[WIFI_DATA][self.entity_description.key][
                "qr_code"
            ],
        )
        return qr_code.getvalue()
@@ -65,14 +65,6 @@
        "name": "Internet key reconnect"
      }
    },
    "image": {
      "guest": {
        "name": "Guest network"
      },
      "guest_5g": {
        "name": "Guest 5GHz network"
      }
    },
    "sensor": {
      "active_connection": {
        "name": "Active connection",
@@ -9,5 +9,5 @@
  "iot_class": "local_push",
  "loggers": ["voip_utils"],
  "quality_scale": "internal",
  "requirements": ["voip-utils==0.3.5"]
  "requirements": ["voip-utils==0.3.4"]
}
@@ -5,14 +5,15 @@ from __future__ import annotations
import abc
import asyncio
from collections import defaultdict
from collections.abc import Callable, Container, Hashable, Iterable, Mapping
from collections.abc import Callable, Container, Coroutine, Hashable, Iterable, Mapping
from contextlib import suppress
import copy
from dataclasses import dataclass
from enum import StrEnum
import functools
import logging
from types import MappingProxyType
from typing import Any, Generic, Required, TypedDict, TypeVar, cast
from typing import Any, Concatenate, Generic, Required, TypedDict, TypeVar, cast

import voluptuous as vol

@@ -150,6 +151,15 @@ class FlowResult(TypedDict, Generic[_FlowContextT, _HandlerT], total=False):
    url: str


class ProgressStepData[_FlowResultT](TypedDict):
    """Typed data for progress step tracking."""

    tasks: dict[str, asyncio.Task[Any]]
    abort_reason: str
    abort_description_placeholders: Mapping[str, str]
    next_step_result: _FlowResultT | None


def _map_error_to_schema_errors(
    schema_errors: dict[str, Any],
    error: vol.Invalid,
@@ -635,6 +645,24 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
    __progress_task: asyncio.Task[Any] | None = None
    __no_progress_task_reported = False
    deprecated_show_progress = False
    __progress_step_data: ProgressStepData[_FlowResultT] | None = None

    @property
    def _progress_step_data(self) -> ProgressStepData[_FlowResultT]:
        """Return progress step data.

        A property is used instead of a simple attribute as derived classes
        do not call super().__init__.
        The property makes sure that the dict is initialized if needed.
        """
        if not self.__progress_step_data:
            self.__progress_step_data = {
                "tasks": {},
                "abort_reason": "",
                "abort_description_placeholders": MappingProxyType({}),
                "next_step_result": None,
            }
        return self.__progress_step_data

    @property
    def source(self) -> str | None:
@@ -757,6 +785,39 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]):
            description_placeholders=description_placeholders,
        )

    async def async_step__progress_step_abort(
        self, user_input: dict[str, Any] | None = None
    ) -> _FlowResultT:
        """Abort the flow."""
        progress_step_data = self._progress_step_data
        return self.async_abort(
            reason=progress_step_data["abort_reason"],
            description_placeholders=progress_step_data[
                "abort_description_placeholders"
            ],
        )

    async def async_step__progress_step_progress_done(
        self, user_input: dict[str, Any] | None = None
    ) -> _FlowResultT:
        """Progress done. Return the next step.

        Used by the progress_step decorator to let decorated step methods
        call the next step method and change step without using
        async_show_progress_done. If no next step is set, abort the flow.
        """
        progress_step_data = self._progress_step_data
        if (next_step_result := progress_step_data["next_step_result"]) is None:
            return self.async_abort(
                reason=progress_step_data["abort_reason"],
                description_placeholders=progress_step_data[
                    "abort_description_placeholders"
                ],
            )
        return next_step_result

    @callback
    def async_external_step(
        self,
@@ -937,3 +998,90 @@ class section:
    def __call__(self, value: Any) -> Any:
        """Validate input."""
        return self.schema(value)


type _FuncType[_T: FlowHandler[Any, Any, Any], _R: FlowResult[Any, Any], **_P] = (
    Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]]
)


def progress_step[
    HandlerT: FlowHandler[Any, Any, Any],
    ResultT: FlowResult[Any, Any],
    **P,
](
    description_placeholders: (
        dict[str, str] | Callable[[Any], dict[str, str]] | None
    ) = None,
) -> Callable[[_FuncType[HandlerT, ResultT, P]], _FuncType[HandlerT, ResultT, P]]:
    """Decorator to create a progress step from an async function.

    The decorated method should be a step method that needs to show progress.
    The method should accept dict[str, Any] as user_input
    and should return a FlowResult or raise AbortFlow.
    The method can call self.async_update_progress(progress)
    to update progress.

    Args:
        description_placeholders: Static dict or callable that returns dict for progress UI placeholders.
    """

    def decorator(
        func: _FuncType[HandlerT, ResultT, P],
    ) -> _FuncType[HandlerT, ResultT, P]:
        @functools.wraps(func)
        async def wrapper(
            self: FlowHandler[Any, ResultT], *args: P.args, **kwargs: P.kwargs
        ) -> ResultT:
            step_id = func.__name__.replace("async_step_", "")
            progress_step_data = self._progress_step_data
            # Check if we have a progress task running
            progress_task = progress_step_data["tasks"].get(step_id)

            if progress_task is None:
                # First call - create and start the progress task
                progress_task = self.hass.async_create_task(
                    func(self, *args, **kwargs),  # type: ignore[arg-type]
                    f"Progress step {step_id}",
                )
                progress_step_data["tasks"][step_id] = progress_task

            if not progress_task.done():
                # Handle description placeholders
                placeholders = None
                if description_placeholders is not None:
                    if callable(description_placeholders):
                        placeholders = description_placeholders(self)
                    else:
                        placeholders = description_placeholders

                return self.async_show_progress(
                    step_id=step_id,
                    progress_action=step_id,
                    progress_task=progress_task,
                    description_placeholders=placeholders,
                )

            # Task is done or this is a subsequent call
            try:
                progress_step_data["next_step_result"] = await progress_task
            except AbortFlow as err:
                progress_step_data["abort_reason"] = err.reason
                progress_step_data["abort_description_placeholders"] = (
                    err.description_placeholders or {}
                )
                return self.async_show_progress_done(
                    next_step_id="_progress_step_abort"
                )
            finally:
                # Clean up task reference
                progress_step_data["tasks"].pop(step_id, None)

            return self.async_show_progress_done(
                next_step_id="_progress_step_progress_done"
            )

        return wrapper

    return decorator
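A hedged usage sketch of the progress_step decorator defined above. The flow class, domain string, and helper method names below are illustrative assumptions, not code from this changeset; only ConfigFlow, ConfigFlowResult, and progress_step itself come from the sources shown:

from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.data_entry_flow import progress_step


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Hypothetical flow using the decorator (illustration only)."""

    @progress_step(description_placeholders={"name": "Example device"})
    async def async_step_connect(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        # The body runs as a background task; the UI shows a progress screen
        # until it finishes. Raising AbortFlow routes to _progress_step_abort.
        await self._slow_connect()  # illustrative helper, may raise AbortFlow
        return await self.async_step_finish()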
1
homeassistant/generated/config_flows.py
generated
@@ -443,7 +443,6 @@ FLOWS = {
    "mystrom",
    "myuplink",
    "nam",
    "namecheapdns",
    "nanoleaf",
    "nasweb",
    "neato",
@@ -4343,8 +4343,8 @@
  },
  "namecheapdns": {
    "name": "Namecheap DynamicDNS",
    "integration_type": "service",
    "config_flow": true,
    "integration_type": "hub",
    "config_flow": false,
    "iot_class": "cloud_push"
  },
  "nanoleaf": {
@@ -2,7 +2,6 @@

from __future__ import annotations

import abc
from collections.abc import Callable
import dataclasses
import logging
@@ -269,74 +268,7 @@ def async_extract_referenced_entity_ids(
    return selected


class TargetEntityChangeTracker(abc.ABC):
    """Helper class to manage entity change tracking for targets."""

    def __init__(
        self,
        hass: HomeAssistant,
        target_selection: TargetSelection,
        entity_filter: Callable[[set[str]], set[str]],
    ) -> None:
        """Initialize the state change tracker."""
        self._hass = hass
        self._target_selection = target_selection
        self._entity_filter = entity_filter

        self._registry_unsubs: list[CALLBACK_TYPE] = []

    def async_setup(self) -> Callable[[], None]:
        """Set up the state change tracking."""
        self._setup_registry_listeners()
        self._handle_target_update()
        return self._unsubscribe

    @abc.abstractmethod
    @callback
    def _handle_entities_update(self, tracked_entities: set[str]) -> None:
        """Called when there's an update to the list of entities of the tracked targets."""

    @callback
    def _handle_target_update(self, event: Event[Any] | None = None) -> None:
        """Handle updates in the tracked targets."""
        selected = async_extract_referenced_entity_ids(
            self._hass, self._target_selection, expand_group=False
        )
        filtered_entities = self._entity_filter(
            selected.referenced | selected.indirectly_referenced
        )
        self._handle_entities_update(filtered_entities)

    def _setup_registry_listeners(self) -> None:
        """Set up listeners for registry changes that require resubscription."""

        # Subscribe to registry updates that can change the entities to track:
        # - Entity registry: entity added/removed; entity labels changed; entity area changed.
        # - Device registry: device labels changed; device area changed.
        # - Area registry: area floor changed.
        #
        # We don't track other registries (like floor or label registries) because their
        # changes don't affect which entities are tracked.
        self._registry_unsubs = [
            self._hass.bus.async_listen(
                er.EVENT_ENTITY_REGISTRY_UPDATED, self._handle_target_update
            ),
            self._hass.bus.async_listen(
                dr.EVENT_DEVICE_REGISTRY_UPDATED, self._handle_target_update
            ),
            self._hass.bus.async_listen(
                ar.EVENT_AREA_REGISTRY_UPDATED, self._handle_target_update
            ),
        ]

    def _unsubscribe(self) -> None:
        """Unsubscribe from all events."""
        for registry_unsub in self._registry_unsubs:
            registry_unsub()
        self._registry_unsubs.clear()


class TargetStateChangeTracker(TargetEntityChangeTracker):
class TargetStateChangeTracker:
    """Helper class to manage state change tracking for targets."""

    def __init__(
@@ -347,29 +279,78 @@ class TargetStateChangeTracker(TargetEntityChangeTracker):
        entity_filter: Callable[[set[str]], set[str]],
    ) -> None:
        """Initialize the state change tracker."""
        super().__init__(hass, target_selection, entity_filter)
        self._hass = hass
        self._target_selection = target_selection
        self._action = action
        self._state_change_unsub: CALLBACK_TYPE | None = None
        self._entity_filter = entity_filter

    def _handle_entities_update(self, tracked_entities: set[str]) -> None:
        """Handle the tracked entities."""
        self._state_change_unsub: CALLBACK_TYPE | None = None
        self._registry_unsubs: list[CALLBACK_TYPE] = []

    def async_setup(self) -> Callable[[], None]:
        """Set up the state change tracking."""
        self._setup_registry_listeners()
        self._track_entities_state_change()
        return self._unsubscribe

    def _track_entities_state_change(self) -> None:
        """Set up state change tracking for currently selected entities."""
        selected = async_extract_referenced_entity_ids(
            self._hass, self._target_selection, expand_group=False
        )

        tracked_entities = self._entity_filter(
            selected.referenced | selected.indirectly_referenced
        )

        @callback
        def state_change_listener(event: Event[EventStateChangedData]) -> None:
            """Handle state change events."""
            if event.data["entity_id"] in tracked_entities:
            if (
                event.data["entity_id"] in selected.referenced
                or event.data["entity_id"] in selected.indirectly_referenced
            ):
                self._action(TargetStateChangedData(event, tracked_entities))

        _LOGGER.debug("Tracking state changes for entities: %s", tracked_entities)
        if self._state_change_unsub:
            self._state_change_unsub()
        self._state_change_unsub = async_track_state_change_event(
            self._hass, tracked_entities, state_change_listener
        )

    def _setup_registry_listeners(self) -> None:
        """Set up listeners for registry changes that require resubscription."""

        @callback
        def resubscribe_state_change_event(event: Event[Any] | None = None) -> None:
            """Resubscribe to state change events when registry changes."""
            if self._state_change_unsub:
                self._state_change_unsub()
            self._track_entities_state_change()

        # Subscribe to registry updates that can change the entities to track:
        # - Entity registry: entity added/removed; entity labels changed; entity area changed.
        # - Device registry: device labels changed; device area changed.
        # - Area registry: area floor changed.
        #
        # We don't track other registries (like floor or label registries) because their
        # changes don't affect which entities are tracked.
        self._registry_unsubs = [
            self._hass.bus.async_listen(
                er.EVENT_ENTITY_REGISTRY_UPDATED, resubscribe_state_change_event
            ),
            self._hass.bus.async_listen(
                dr.EVENT_DEVICE_REGISTRY_UPDATED, resubscribe_state_change_event
            ),
            self._hass.bus.async_listen(
                ar.EVENT_AREA_REGISTRY_UPDATED, resubscribe_state_change_event
            ),
        ]

    def _unsubscribe(self) -> None:
        """Unsubscribe from all events."""
        super()._unsubscribe()
        for registry_unsub in self._registry_unsubs:
            registry_unsub()
        self._registry_unsubs.clear()
        if self._state_change_unsub:
            self._state_change_unsub()
            self._state_change_unsub = None
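The registry-listener comments above describe a resubscribe pattern: tear down the current state-change subscription and re-track whenever a registry event may have changed the tracked entity set. A dependency-free sketch of that pattern, with all names invented for illustration:

from collections.abc import Callable


class ResubscribingTracker:
    """Keep exactly one live subscription; replace it when the tracked set changes."""

    def __init__(self, subscribe: Callable[[set[str]], Callable[[], None]]) -> None:
        # subscribe() returns an unsubscribe callback, like HA's event helpers.
        self._subscribe = subscribe
        self._unsub: Callable[[], None] | None = None

    def retrack(self, tracked: set[str]) -> None:
        if self._unsub:
            self._unsub()  # drop the stale subscription first
        self._unsub = self._subscribe(tracked)

    def close(self) -> None:
        if self._unsub:
            self._unsub()
            self._unsub = None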
@@ -53,10 +53,10 @@ Pillow==12.0.0
propcache==0.4.1
psutil-home-assistant==0.0.1
PyJWT==2.10.1
pymicro-vad==1.0.1
PyNaCl==1.6.0
pyOpenSSL==25.3.0
pyserial==3.5
pysilero-vad==3.2.0
pyspeex-noise==1.0.2
python-slugify==8.0.4
PyTurboJPEG==1.8.0
@@ -16,7 +16,7 @@ from homeassistant.const import Platform

if TYPE_CHECKING:
    # InferenceResult is available only from astroid >= 2.12.0
    # prek should still work on out of date environments
    # pre-commit should still work on out of date environments
    from astroid.typing import InferenceResult

_COMMON_ARGUMENTS: dict[str, list[str]] = {
2
requirements.txt
generated
@@ -39,8 +39,8 @@ Pillow==12.0.0
propcache==0.4.1
psutil-home-assistant==0.0.1
PyJWT==2.10.1
pymicro-vad==1.0.1
pyOpenSSL==25.3.0
pysilero-vad==3.2.0
pyspeex-noise==1.0.2
python-slugify==8.0.4
PyTurboJPEG==1.8.0
15
requirements_all.txt
generated
@@ -785,6 +785,7 @@ decora-wifi==1.4
deebot-client==17.0.1

# homeassistant.components.ihc
# homeassistant.components.namecheapdns
# homeassistant.components.ohmconnect
# homeassistant.components.sonos
defusedxml==0.7.1
@@ -1010,7 +1011,7 @@ freebox-api==1.2.2
freesms==0.2.0

# homeassistant.components.fressnapf_tracker
fressnapftracker==0.2.1
fressnapftracker==0.2.0

# homeassistant.components.fritz
# homeassistant.components.fritzbox_callmonitor
@@ -2200,9 +2201,6 @@ pymediaroom==0.6.5.4
# homeassistant.components.meteoclimatic
pymeteoclimatic==0.1.0

# homeassistant.components.assist_pipeline
pymicro-vad==1.0.1

# homeassistant.components.miele
pymiele==0.6.1

@@ -2410,6 +2408,9 @@ pysiaalarm==3.1.1
# homeassistant.components.signal_messenger
pysignalclirestapi==0.3.24

# homeassistant.components.assist_pipeline
pysilero-vad==3.2.0

# homeassistant.components.sky_hub
pyskyqhub==0.1.4

@@ -2707,7 +2708,7 @@ qbittorrent-api==2024.9.67
qbusmqttapi==1.4.2

# homeassistant.components.qingping
qingping-ble==1.1.0
qingping-ble==1.0.1

# homeassistant.components.qnap
qnapstats==0.4.0
@@ -3145,7 +3146,7 @@ visionpluspython==1.0.2
vobject==0.9.9

# homeassistant.components.voip
voip-utils==0.3.5
voip-utils==0.3.4

# homeassistant.components.volkszaehler
volkszaehler==0.4.0
@@ -3218,7 +3219,7 @@ wyoming==1.7.2
xiaomi-ble==1.4.1

# homeassistant.components.knx
xknx==3.14.0
xknx==3.13.0

# homeassistant.components.knx
xknxproject==3.8.2

@@ -15,7 +15,7 @@ librt==0.2.1
license-expression==30.4.3
mock-open==1.4.0
mypy-dev==1.19.0a4
prek==0.2.28
pre-commit==4.2.0
pydantic==2.12.2
pylint==4.0.1
pylint-per-file-ignores==1.4.0
15
requirements_test_all.txt
generated
@@ -694,6 +694,7 @@ debugpy==1.8.17
deebot-client==17.0.1

# homeassistant.components.ihc
# homeassistant.components.namecheapdns
# homeassistant.components.ohmconnect
# homeassistant.components.sonos
defusedxml==0.7.1
@@ -889,7 +890,7 @@ forecast-solar==4.2.0
freebox-api==1.2.2

# homeassistant.components.fressnapf_tracker
fressnapftracker==0.2.1
fressnapftracker==0.2.0

# homeassistant.components.fritz
# homeassistant.components.fritzbox_callmonitor
@@ -1862,9 +1863,6 @@ pymata-express==1.19
# homeassistant.components.meteoclimatic
pymeteoclimatic==0.1.0

# homeassistant.components.assist_pipeline
pymicro-vad==1.0.1

# homeassistant.components.miele
pymiele==0.6.1

@@ -2036,6 +2034,9 @@ pysiaalarm==3.1.1
# homeassistant.components.signal_messenger
pysignalclirestapi==0.3.24

# homeassistant.components.assist_pipeline
pysilero-vad==3.2.0

# homeassistant.components.sma
pysma==1.1.0

@@ -2276,7 +2277,7 @@ qbittorrent-api==2024.9.67
qbusmqttapi==1.4.2

# homeassistant.components.qingping
qingping-ble==1.1.0
qingping-ble==1.0.1

# homeassistant.components.qnap
qnapstats==0.4.0
@@ -2633,7 +2634,7 @@ visionpluspython==1.0.2
vobject==0.9.9

# homeassistant.components.voip
voip-utils==0.3.5
voip-utils==0.3.4

# homeassistant.components.volvo
volvocarsapi==0.4.3
@@ -2691,7 +2692,7 @@ wyoming==1.7.2
xiaomi-ble==1.4.1

# homeassistant.components.knx
xknx==3.14.0
xknx==3.13.0

# homeassistant.components.knx
xknxproject==3.8.2
@@ -427,7 +427,7 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
    if config.action == "generate" and manifests_resorted:
        subprocess.run(
            [
                "prek",
                "pre-commit",
                "run",
                "--hook-stage",
                "manual",
@@ -15,7 +15,7 @@ printf "%s\n" $files
echo "=============="
echo "LINT with ruff"
echo "=============="
prek run ruff-check --files $files
pre-commit run ruff-check --files $files
echo "================"
echo "LINT with pylint"
echo "================"
@@ -119,7 +119,7 @@ async def pylint(files):

async def ruff(files):
    """Exec ruff."""
    _, log = await async_exec("prek", "run", "ruff", "--files", *files)
    _, log = await async_exec("pre-commit", "run", "ruff", "--files", *files)
    res = []
    for line in log.splitlines():
        line = line.split(":")
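For context, the `ruff()` helper above splits each log line on `:` to pull the location apart. A hedged sketch of the "path:line:col: message" shape being parsed (the path and message are made up for illustration; the script splits on every colon, while `maxsplit` here just keeps the example compact):

log_line = "homeassistant/core.py:10:1: F401 'os' imported but unused"
path, row, col, message = log_line.split(":", 3)
assert path == "homeassistant/core.py"
assert (row, col) == ("10", "1")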
@@ -24,12 +24,7 @@ def gather_info(arguments) -> Info:
    info = _gather_info(
        {
            "domain": {
                "prompt": (
                    """What is the domain?

Hint: The domain is a short name consisting of characters and underscores.
This domain has to be unique, cannot be changed, and has to match the directory name of the integration."""
                ),
                "prompt": "What is the domain?",
                "validators": [
                    CHECK_EMPTY,
                    [
@@ -77,8 +72,13 @@ def gather_new_integration(determine_auth: bool) -> Info:
        },
        "codeowner": {
            "prompt": "What is your GitHub handle?",
            "validators": [CHECK_EMPTY],
            "converter": lambda value: value if value.startswith("@") else f"@{value}",
            "validators": [
                CHECK_EMPTY,
                [
                    'GitHub handles need to start with an "@"',
                    lambda value: value.startswith("@"),
                ],
            ],
        },
        "requirement": {
            "prompt": "What PyPI package and version do you depend on? Leave blank for none.",
@@ -31,7 +31,7 @@ fi

script/bootstrap

prek install
pre-commit install

hass --script ensure_config -c config
@@ -2,19 +2,15 @@
"""Helper script to bump the current version."""

import argparse
from copy import replace
from pathlib import Path
import re
import subprocess

import packaging
from packaging.version import Version

from homeassistant import const
from homeassistant.util import dt as dt_util

_PACKAGING_VERSION_BELOW_26 = Version(packaging.__version__) < Version("26.0dev0")


def _bump_release(release, bump_type):
    """Bump a release tuple consisting of 3 numbers."""
@@ -29,13 +25,6 @@ def _bump_release(release, bump_type):
    return major, minor, patch


def _get_dev_change(dev: int) -> int | tuple[str, int]:
    """Return the dev change based on packaging version."""
    if _PACKAGING_VERSION_BELOW_26:
        return ("dev", dev)
    return dev


def bump_version(
    version: Version, bump_type: str, *, nightly_version: str | None = None
) -> Version:
@@ -69,10 +58,9 @@
        # Convert 0.67.3.b5 to 0.67.4.dev0
        # Convert 0.67.3.dev0 to 0.67.3.dev1
        if version.is_devrelease:
            to_change["dev"] = _get_dev_change(version.dev + 1)
            to_change["dev"] = ("dev", version.dev + 1)
        else:
            to_change["dev"] = _get_dev_change(0)
            to_change["pre"] = None
            to_change["pre"] = ("dev", 0)
            to_change["release"] = _bump_release(version.release, "minor")

    elif bump_type == "beta":
@@ -111,19 +99,14 @@
            raise ValueError("Nightly version must be a dev version")
        new_dev = new_version.dev

        if not isinstance(new_dev, int):
            new_dev = int(new_dev)
        to_change["dev"] = _get_dev_change(new_dev)
        to_change["dev"] = ("dev", new_dev)

    else:
        raise ValueError(f"Unsupported type: {bump_type}")

    if _PACKAGING_VERSION_BELOW_26:
        temp = Version("0")
        temp._version = version._version._replace(**to_change)  # noqa: SLF001
        return Version(str(temp))

    return replace(version, **to_change)
    temp = Version("0")
    temp._version = version._version._replace(**to_change)  # noqa: SLF001
    return Version(str(temp))


def write_version(version):
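As a sanity check on the dev-bump logic above: with `packaging`, a dev release exposes `is_devrelease` and `dev`, and bumping means replacing the dev segment. A minimal sketch, assuming only the public `Version` API (the string rebuild here is purely for illustration; the script itself goes through `to_change`):

from packaging.version import Version

version = Version("0.67.3.dev0")
assert version.is_devrelease and version.dev == 0

# 0.67.3.dev0 -> 0.67.3.dev1, matching the is_devrelease branch above.
bumped = Version(f"{version.major}.{version.minor}.{version.micro}.dev{version.dev + 1}")
assert str(bumped) == "0.67.3.dev1"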
@@ -44,16 +44,10 @@ class MockCalendarEntity(CalendarEntity):

    _attr_has_entity_name = True

    def __init__(
        self,
        name: str,
        events: list[CalendarEvent] | None = None,
        unique_id: str | None = None,
    ) -> None:
    def __init__(self, name: str, events: list[CalendarEvent] | None = None) -> None:
        """Initialize entity."""
        self._attr_name = name.capitalize()
        self._events = events or []
        self._attr_unique_id = unique_id

    @property
    def event(self) -> CalendarEvent | None:
@@ -188,7 +182,6 @@ def create_test_entities() -> list[MockCalendarEntity]:
                location="Future Location",
            )
        ],
        unique_id="calendar_1_id",
    )
    entity1.async_get_events = AsyncMock(wraps=entity1.async_get_events)

@@ -202,7 +195,6 @@ def create_test_entities() -> list[MockCalendarEntity]:
                summary="Current Event",
            )
        ],
        unique_id="calendar_2_id",
    )
    entity2.async_get_events = AsyncMock(wraps=entity2.async_get_events)
@@ -11,7 +11,6 @@ from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Generator
from contextlib import asynccontextmanager
from dataclasses import dataclass
import datetime
import logging
from typing import Any
@@ -22,141 +21,19 @@ from freezegun.api import FrozenDateTimeFactory
import pytest

from homeassistant.components import automation, calendar
from homeassistant.components.calendar.trigger import (
    CONF_OFFSET_TYPE,
    EVENT_END,
    EVENT_START,
    OFFSET_TYPE_AFTER,
    OFFSET_TYPE_BEFORE,
)
from homeassistant.const import (
    ATTR_AREA_ID,
    ATTR_DEVICE_ID,
    ATTR_ENTITY_ID,
    ATTR_LABEL_ID,
    CONF_OFFSET,
    CONF_OPTIONS,
    CONF_PLATFORM,
    CONF_TARGET,
    SERVICE_TURN_OFF,
)
from homeassistant.components.calendar.trigger import EVENT_END, EVENT_START
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF
from homeassistant.core import HomeAssistant
from homeassistant.helpers import (
    area_registry as ar,
    device_registry as dr,
    entity_registry as er,
    label_registry as lr,
)
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util

from .conftest import MockCalendarEntity

from tests.common import (
    MockConfigEntry,
    async_fire_time_changed,
    async_mock_service,
    mock_device_registry,
)
from tests.common import MockConfigEntry, async_fire_time_changed, async_mock_service

_LOGGER = logging.getLogger(__name__)


@dataclass
class TriggerFormat:
    """Abstraction for different trigger configuration formats."""

    id: str

    def get_platform(self, event_type: str) -> str:
        """Get the platform string for trigger payload assertions."""
        raise NotImplementedError

    def get_trigger_data(
        self, entity_id: str, event_type: str, offset: datetime.timedelta | None = None
    ) -> dict[str, Any]:
        """Get the trigger configuration data."""
        raise NotImplementedError

    def get_expected_call_data(
        self, entity_id: str, event_type: str, calendar_event: dict[str, Any]
    ) -> dict[str, Any]:
        """Get the expected call data for assertion."""
        return {
            "platform": self.get_platform(event_type),
            "event": event_type,
            "entity_id": entity_id,
            "calendar_event": calendar_event,
        }


@dataclass
class LegacyTriggerFormat(TriggerFormat):
    """Legacy trigger format using platform: calendar with entity_id and event."""

    id: str = "legacy"

    def get_platform(self, event_type: str) -> str:
        """Get the platform string for trigger payload assertions."""
        return calendar.DOMAIN

    def get_trigger_data(
        self, entity_id: str, event_type: str, offset: datetime.timedelta | None = None
    ) -> dict[str, Any]:
        """Get the trigger configuration data."""
        trigger_data: dict[str, Any] = {
            CONF_PLATFORM: calendar.DOMAIN,
            "entity_id": entity_id,
            "event": event_type,
        }
        if offset:
            trigger_data[CONF_OFFSET] = offset
        return trigger_data


@dataclass
class TargetTriggerFormat(TriggerFormat):
    """Target trigger format using platform: calendar.event_started/ended with target."""

    id: str = "target"

    def get_platform(self, event_type: str) -> str:
        """Get the platform string for trigger payload assertions."""
        trigger_type = "event_started" if event_type == EVENT_START else "event_ended"
        return f"{calendar.DOMAIN}.{trigger_type}"

    def get_trigger_data(
        self, entity_id: str, event_type: str, offset: datetime.timedelta | None = None
    ) -> dict[str, Any]:
        """Get the trigger configuration data."""
        trigger_type = "event_started" if event_type == EVENT_START else "event_ended"
        trigger_data: dict[str, Any] = {
            CONF_PLATFORM: f"{calendar.DOMAIN}.{trigger_type}",
            CONF_TARGET: {"entity_id": entity_id},
        }
        if offset:
            options: dict[str, Any] = {}
            # Convert signed offset to offset + offset_type
            if offset < datetime.timedelta(0):
                options[CONF_OFFSET] = -offset
                options[CONF_OFFSET_TYPE] = OFFSET_TYPE_BEFORE
            else:
                options[CONF_OFFSET] = offset
                options[CONF_OFFSET_TYPE] = OFFSET_TYPE_AFTER
            trigger_data[CONF_OPTIONS] = options
        return trigger_data


TRIGGER_FORMATS = [LegacyTriggerFormat(), TargetTriggerFormat()]
TRIGGER_FORMAT_IDS = [fmt.id for fmt in TRIGGER_FORMATS]
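The signed-offset conversion in `TargetTriggerFormat.get_trigger_data` is the crux of the new format: a negative legacy offset becomes a positive offset plus an "offset before" marker. A standalone sketch of that mapping, with plain strings standing in for the `OFFSET_TYPE_*` constants:

import datetime


def split_signed_offset(offset: datetime.timedelta) -> tuple[datetime.timedelta, str]:
    # Negative offsets fire before the event; positive ones after.
    if offset < datetime.timedelta(0):
        return -offset, "before"
    return offset, "after"


assert split_signed_offset(datetime.timedelta(hours=-1)) == (
    datetime.timedelta(hours=1),
    "before",
)
assert split_signed_offset(datetime.timedelta(hours=1)) == (
    datetime.timedelta(hours=1),
    "after",
)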
@pytest.fixture(params=TRIGGER_FORMATS, ids=TRIGGER_FORMAT_IDS)
def trigger_format(request: pytest.FixtureRequest) -> TriggerFormat:
    """Fixture providing both trigger formats for parameterized tests."""
    return request.param


CALENDAR_ENTITY_ID = "calendar.calendar_2"

TEST_AUTOMATION_ACTION = {
@@ -164,7 +41,6 @@ TEST_AUTOMATION_ACTION = {
    "data": {
        "platform": "{{ trigger.platform }}",
        "event": "{{ trigger.event }}",
        "entity_id": "{{ trigger.entity_id }}",
        "calendar_event": "{{ trigger.calendar_event }}",
    },
}
@@ -175,59 +51,6 @@ TEST_AUTOMATION_ACTION = {
TEST_TIME_ADVANCE_INTERVAL = datetime.timedelta(minutes=1)
TEST_UPDATE_INTERVAL = datetime.timedelta(minutes=7)

TARGET_TEST_FIRST_START_CALL_DATA = [
    {
        "platform": "calendar.event_started",
        "event": "start",
        "entity_id": "calendar.calendar_1",
        "calendar_event": {
            "start": "2022-04-19T11:00:00+00:00",
            "end": "2022-04-19T11:30:00+00:00",
            "summary": "Event on Calendar 1",
            "all_day": False,
        },
    }
]
TARGET_TEST_SECOND_START_CALL_DATA = [
    {
        "platform": "calendar.event_started",
        "event": "start",
        "entity_id": "calendar.calendar_2",
        "calendar_event": {
            "start": "2022-04-19T11:15:00+00:00",
            "end": "2022-04-19T11:45:00+00:00",
            "summary": "Event on Calendar 2",
            "all_day": False,
        },
    }
]
TARGET_TEST_FIRST_END_CALL_DATA = [
    {
        "platform": "calendar.event_ended",
        "event": "end",
        "entity_id": "calendar.calendar_1",
        "calendar_event": {
            "start": "2022-04-19T11:00:00+00:00",
            "end": "2022-04-19T11:30:00+00:00",
            "summary": "Event on Calendar 1",
            "all_day": False,
        },
    }
]
TARGET_TEST_SECOND_END_CALL_DATA = [
    {
        "platform": "calendar.event_ended",
        "event": "end",
        "entity_id": "calendar.calendar_2",
        "calendar_event": {
            "start": "2022-04-19T11:15:00+00:00",
            "end": "2022-04-19T11:45:00+00:00",
            "summary": "Event on Calendar 2",
            "all_day": False,
        },
    }
]


class FakeSchedule:
    """Test fixture class for returning events in a specific date range."""
@@ -287,65 +110,18 @@ async def mock_setup_platform(
    await hass.async_block_till_done()


@pytest.fixture
def target_calendars(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    area_registry: ar.AreaRegistry,
    label_registry: lr.LabelRegistry,
):
    """Associate calendar entities with different targets.

    Sets up the following target structure:
    - area_both: An area containing both calendar entities
    - label_calendar_1: A label assigned to calendar 1 only
    - device_calendar_1: A device associated with calendar 1
    - device_calendar_2: A device associated with calendar 2
    - area_devices: An area containing both devices
    """
    area_both = area_registry.async_get_or_create("area_both_calendars")
    label_calendar_1 = label_registry.async_create("calendar_1_label")
    label_on_devices = label_registry.async_create("label_on_devices")

    device_calendar_1 = dr.DeviceEntry(
        id="device_calendar_1", labels=[label_on_devices.label_id]
    )
    device_calendar_2 = dr.DeviceEntry(
        id="device_calendar_2", labels=[label_on_devices.label_id]
    )
    mock_device_registry(
        hass,
        {
            device_calendar_1.id: device_calendar_1,
            device_calendar_2.id: device_calendar_2,
        },
    )

    # Associate calendar entities with targets
    entity_registry.async_update_entity(
        "calendar.calendar_1",
        area_id=area_both.id,
        labels={label_calendar_1.label_id},
        device_id=device_calendar_1.id,
    )
    entity_registry.async_update_entity(
        "calendar.calendar_2",
        area_id=area_both.id,
        device_id=device_calendar_2.id,
    )
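With that fixture in place, a target-style trigger can address either calendar indirectly. A sketch of the configuration shape the tests exercise (dict form, mirroring `trigger_start` further below; the area and label ids come from the fixture above):

# Trigger on every calendar in the shared area; equivalent selections could
# use {"label_id": "calendar_1_label"} or {"device_id": "device_calendar_1"}.
trigger = {
    "platform": "calendar.event_started",
    "target": {"area_id": "area_both_calendars"},
}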
@asynccontextmanager
async def create_automation(
    hass: HomeAssistant,
    trigger_format: TriggerFormat,
    event_type: str,
    offset: datetime.timedelta | None = None,
    hass: HomeAssistant, event_type: str, offset=None
) -> AsyncIterator[None]:
    """Register an automation using the specified trigger format."""
    trigger_data = trigger_format.get_trigger_data(
        CALENDAR_ENTITY_ID, event_type, offset
    )
    """Register an automation."""
    trigger_data = {
        "platform": calendar.DOMAIN,
        "entity_id": CALENDAR_ENTITY_ID,
        "event": event_type,
    }
    if offset:
        trigger_data["offset"] = offset
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
@@ -397,14 +173,13 @@ async def test_event_start_trigger(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
) -> None:
    """Test a calendar trigger based on start time."""
    event_data = test_entity.create_event(
        start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        assert len(calls_data()) == 0

        await fake_schedule.fire_until(
@@ -412,17 +187,19 @@ async def test_event_start_trigger(
        )

        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data
            )
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data,
            }
        ]


@pytest.mark.parametrize(
    ("offset_delta"),
    ("offset_str", "offset_delta"),
    [
        datetime.timedelta(hours=-1),
        datetime.timedelta(hours=1),
        ("-01:00", datetime.timedelta(hours=-1)),
        ("+01:00", datetime.timedelta(hours=1)),
    ],
)
async def test_event_start_trigger_with_offset(
@@ -430,17 +207,15 @@ async def test_event_start_trigger_with_offset(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
    offset_delta: datetime.timedelta,
    offset_str,
    offset_delta,
) -> None:
    """Test a calendar trigger based on start time with an offset."""
    event_data = test_entity.create_event(
        start=datetime.datetime.fromisoformat("2022-04-19 12:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 12:30:00+00:00"),
    )
    async with create_automation(
        hass, trigger_format, EVENT_START, offset=offset_delta
    ):
    async with create_automation(hass, EVENT_START, offset=offset_str):
        # No calls yet
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 11:55:00+00:00") + offset_delta,
@@ -452,9 +227,11 @@ async def test_event_start_trigger_with_offset(
            datetime.datetime.fromisoformat("2022-04-19 12:05:00+00:00") + offset_delta,
        )
        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data
            )
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data,
            }
        ]
@@ -463,14 +240,13 @@ async def test_event_end_trigger(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
) -> None:
    """Test a calendar trigger based on end time."""
    event_data = test_entity.create_event(
        start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 12:00:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_END):
    async with create_automation(hass, EVENT_END):
        # Event started, nothing should fire yet
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 11:10:00+00:00")
@@ -482,17 +258,19 @@ async def test_event_end_trigger(
            datetime.datetime.fromisoformat("2022-04-19 12:10:00+00:00")
        )
        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_END, event_data
            )
            {
                "platform": "calendar",
                "event": EVENT_END,
                "calendar_event": event_data,
            }
        ]


@pytest.mark.parametrize(
    ("offset_delta"),
    ("offset_str", "offset_delta"),
    [
        datetime.timedelta(hours=-1),
        datetime.timedelta(hours=1),
        ("-01:00", datetime.timedelta(hours=-1)),
        ("+01:00", datetime.timedelta(hours=1)),
    ],
)
async def test_event_end_trigger_with_offset(
@@ -500,15 +278,15 @@ async def test_event_end_trigger_with_offset(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
    offset_delta: datetime.timedelta,
    offset_str,
    offset_delta,
) -> None:
    """Test a calendar trigger based on end time with an offset."""
    event_data = test_entity.create_event(
        start=datetime.datetime.fromisoformat("2022-04-19 12:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 12:30:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_END, offset=offset_delta):
    async with create_automation(hass, EVENT_END, offset=offset_str):
        # No calls yet
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 12:05:00+00:00") + offset_delta,
@@ -520,9 +298,11 @@ async def test_event_end_trigger_with_offset(
            datetime.datetime.fromisoformat("2022-04-19 12:35:00+00:00") + offset_delta,
        )
        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_END, event_data
            )
            {
                "platform": "calendar",
                "event": EVENT_END,
                "calendar_event": event_data,
            }
        ]
@@ -530,14 +310,10 @@ async def test_calendar_trigger_with_no_events(
    hass: HomeAssistant,
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    trigger_format: TriggerFormat,
) -> None:
    """Test a calendar trigger setup with no events."""

    async with (
        create_automation(hass, trigger_format, EVENT_START),
        create_automation(hass, trigger_format, EVENT_END),
    ):
    async with create_automation(hass, EVENT_START), create_automation(hass, EVENT_END):
        # No calls, at arbitrary times
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00")
@@ -550,7 +326,6 @@ async def test_multiple_start_events(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
) -> None:
    """Test that a trigger fires for multiple events."""

@@ -562,17 +337,21 @@ async def test_multiple_start_events(
        start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:15:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00")
        )
        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data1
            ),
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data2
            ),
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data1,
            },
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data2,
            },
        ]


@@ -581,7 +360,6 @@ async def test_multiple_end_events(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
) -> None:
    """Test that a trigger fires for multiple events."""

@@ -593,18 +371,22 @@ async def test_multiple_end_events(
        start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:15:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_END):
    async with create_automation(hass, EVENT_END):
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00")
        )

        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_END, event_data1
            ),
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_END, event_data2
            ),
            {
                "platform": "calendar",
                "event": EVENT_END,
                "calendar_event": event_data1,
            },
            {
                "platform": "calendar",
                "event": EVENT_END,
                "calendar_event": event_data2,
            },
        ]


@@ -613,7 +395,6 @@ async def test_multiple_events_sharing_start_time(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
) -> None:
    """Test that a trigger fires for every event sharing a start time."""

@@ -625,18 +406,22 @@ async def test_multiple_events_sharing_start_time(
        start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 11:35:00+00:00")
        )

        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data1
            ),
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data2
            ),
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data1,
            },
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data2,
            },
        ]


@@ -645,7 +430,6 @@ async def test_overlap_events(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
) -> None:
    """Test that a trigger fires for events that overlap."""

@@ -657,18 +441,22 @@ async def test_overlap_events(
        start=datetime.datetime.fromisoformat("2022-04-19 11:15:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:45:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 11:20:00+00:00")
        )

        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data1
            ),
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data2
            ),
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data1,
            },
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data2,
            },
        ]
@@ -719,7 +507,6 @@ async def test_update_next_event(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
) -> None:
    """Test detection of a new event after the initial trigger is set up."""

@@ -727,7 +514,7 @@ async def test_update_next_event(
        start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:15:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        # No calls before event start
        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 10:45:00+00:00")
@@ -745,12 +532,16 @@ async def test_update_next_event(
            datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00")
        )
        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data2
            ),
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data1
            ),
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data2,
            },
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data1,
            },
        ]


@@ -759,7 +550,6 @@ async def test_update_missed(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    trigger_format: TriggerFormat,
) -> None:
    """Test that new events are missed if they arrive outside the update interval."""

@@ -767,7 +557,7 @@ async def test_update_missed(
        start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        # Events are refreshed at t+TEST_UPDATE_INTERVAL minutes. A new event is
        # added, but the next update happens after the event is already over.
        await fake_schedule.fire_until(
@@ -785,9 +575,11 @@ async def test_update_missed(
            datetime.datetime.fromisoformat("2022-04-19 11:05:00+00:00")
        )
        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data1
            ),
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data1,
            },
        ]
@@ -849,21 +641,22 @@ async def test_event_payload(
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    set_time_zone: None,
    trigger_format: TriggerFormat,
    create_data,
    fire_time,
    payload_data,
) -> None:
    """Test that the fields in the calendar event payload are set."""
    test_entity.create_event(**create_data)
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        assert len(calls_data()) == 0

        await fake_schedule.fire_until(fire_time)
        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, payload_data
            )
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": payload_data,
            }
        ]


@@ -873,7 +666,6 @@ async def test_trigger_timestamp_window_edge(
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    freezer: FrozenDateTimeFactory,
    trigger_format: TriggerFormat,
) -> None:
    """Test that events at the edge of a scan are included."""
    freezer.move_to("2022-04-19 11:00:00+00:00")
@@ -883,16 +675,18 @@ async def test_trigger_timestamp_window_edge(
        start=datetime.datetime.fromisoformat("2022-04-19 11:14:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00"),
    )
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        assert len(calls_data()) == 0

        await fake_schedule.fire_until(
            datetime.datetime.fromisoformat("2022-04-19 11:20:00+00:00")
        )
        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data
            )
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data,
            }
        ]


@@ -902,7 +696,6 @@ async def test_event_start_trigger_dst(
    fake_schedule: FakeSchedule,
    test_entity: MockCalendarEntity,
    freezer: FrozenDateTimeFactory,
    trigger_format: TriggerFormat,
) -> None:
    """Test a calendar event trigger happening at the start of daylight saving time."""
    await hass.config.async_set_time_zone("America/Los_Angeles")
@@ -927,7 +720,7 @@ async def test_event_start_trigger_dst(
        start=datetime.datetime(2023, 3, 12, 3, 30, tzinfo=tzinfo),
        end=datetime.datetime(2023, 3, 12, 3, 45, tzinfo=tzinfo),
    )
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        assert len(calls_data()) == 0

        await fake_schedule.fire_until(
@@ -935,15 +728,21 @@ async def test_event_start_trigger_dst(
        )

        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event1_data
            ),
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event2_data
            ),
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event3_data
            ),
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event1_data,
            },
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event2_data,
            },
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event3_data,
            },
        ]
@@ -952,8 +751,8 @@ async def test_config_entry_reload(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entities: list[MockCalendarEntity],
    setup_platform: None,
    config_entry: MockConfigEntry,
    trigger_format: TriggerFormat,
) -> None:
    """Test a calendar trigger after a config entry reload.

@@ -962,7 +761,7 @@ async def test_config_entry_reload(
    the automation kept a reference to the specific entity which would be
    invalid after a config entry was reloaded.
    """
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        assert len(calls_data()) == 0

        assert await hass.config_entries.async_reload(config_entry.entry_id)
@@ -979,9 +778,11 @@ async def test_config_entry_reload(
        )

        assert calls_data() == [
            trigger_format.get_expected_call_data(
                CALENDAR_ENTITY_ID, EVENT_START, event_data
            )
            {
                "platform": "calendar",
                "event": EVENT_START,
                "calendar_event": event_data,
            }
        ]


@@ -990,12 +791,12 @@ async def test_config_entry_unload(
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entities: list[MockCalendarEntity],
    setup_platform: None,
    config_entry: MockConfigEntry,
    caplog: pytest.LogCaptureFixture,
    trigger_format: TriggerFormat,
) -> None:
    """Test an automation that references a calendar entity that is unloaded."""
    async with create_automation(hass, trigger_format, EVENT_START):
    async with create_automation(hass, EVENT_START):
        assert len(calls_data()) == 0

        assert await hass.config_entries.async_unload(config_entry.entry_id)
@@ -1005,172 +806,3 @@ async def test_config_entry_unload(
        )

        assert "Entity does not exist calendar.calendar_2" in caplog.text
@pytest.mark.usefixtures("target_calendars")
@pytest.mark.parametrize(
    (
        "trigger_target_conf",
        "first_start_call_data",
        "first_end_call_data",
        "second_start_call_data",
        "second_end_call_data",
    ),
    [
        ({}, [], [], [], []),
        (
            {ATTR_ENTITY_ID: "calendar.calendar_2"},
            [],
            [],
            TARGET_TEST_SECOND_START_CALL_DATA,
            TARGET_TEST_SECOND_END_CALL_DATA,
        ),
        (
            {ATTR_ENTITY_ID: ["calendar.calendar_1", "calendar.calendar_2"]},
            TARGET_TEST_FIRST_START_CALL_DATA,
            TARGET_TEST_FIRST_END_CALL_DATA,
            TARGET_TEST_SECOND_START_CALL_DATA,
            TARGET_TEST_SECOND_END_CALL_DATA,
        ),
        (
            {ATTR_AREA_ID: "area_both_calendars"},
            TARGET_TEST_FIRST_START_CALL_DATA,
            TARGET_TEST_FIRST_END_CALL_DATA,
            TARGET_TEST_SECOND_START_CALL_DATA,
            TARGET_TEST_SECOND_END_CALL_DATA,
        ),
        (
            {ATTR_LABEL_ID: "calendar_1_label"},
            TARGET_TEST_FIRST_START_CALL_DATA,
            TARGET_TEST_FIRST_END_CALL_DATA,
            [],
            [],
        ),
        (
            {ATTR_DEVICE_ID: "device_calendar_1"},
            TARGET_TEST_FIRST_START_CALL_DATA,
            TARGET_TEST_FIRST_END_CALL_DATA,
            [],
            [],
        ),
        (
            {ATTR_DEVICE_ID: "device_calendar_2"},
            [],
            [],
            TARGET_TEST_SECOND_START_CALL_DATA,
            TARGET_TEST_SECOND_END_CALL_DATA,
        ),
        (
            {ATTR_LABEL_ID: "label_on_devices"},
            TARGET_TEST_FIRST_START_CALL_DATA,
            TARGET_TEST_FIRST_END_CALL_DATA,
            TARGET_TEST_SECOND_START_CALL_DATA,
            TARGET_TEST_SECOND_END_CALL_DATA,
        ),
    ],
)
async def test_trigger_with_targets(
    hass: HomeAssistant,
    calls_data: Callable[[], list[dict[str, Any]]],
    fake_schedule: FakeSchedule,
    test_entities: list[MockCalendarEntity],
    trigger_target_conf: dict[str, Any],
    first_start_call_data: list[dict[str, Any]],
    first_end_call_data: list[dict[str, Any]],
    second_start_call_data: list[dict[str, Any]],
    second_end_call_data: list[dict[str, Any]],
) -> None:
    """Test that triggers fire for multiple calendar entities with target selector."""
    calendar_1 = test_entities[0]
    calendar_2 = test_entities[1]

    calendar_1.create_event(
        start=datetime.datetime.fromisoformat("2022-04-19 11:00:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:30:00+00:00"),
        summary="Event on Calendar 1",
    )
    calendar_2.create_event(
        start=datetime.datetime.fromisoformat("2022-04-19 11:15:00+00:00"),
        end=datetime.datetime.fromisoformat("2022-04-19 11:45:00+00:00"),
        summary="Event on Calendar 2",
    )

    trigger_start = {
        CONF_PLATFORM: "calendar.event_started",
        CONF_TARGET: {**trigger_target_conf},
    }
    trigger_end = {
        CONF_PLATFORM: "calendar.event_ended",
        CONF_TARGET: {**trigger_target_conf},
    }

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "alias": "start_trigger",
                    "trigger": trigger_start,
                    "action": TEST_AUTOMATION_ACTION,
                    "mode": "queued",
                },
                {
                    "alias": "end_trigger",
                    "trigger": trigger_end,
                    "action": TEST_AUTOMATION_ACTION,
                    "mode": "queued",
                },
            ]
        },
    )
    await hass.async_block_till_done()

    assert len(calls_data()) == 0

    # Advance past first event start
    await fake_schedule.fire_until(
        datetime.datetime.fromisoformat("2022-04-19 11:10:00+00:00")
    )
    assert calls_data() == first_start_call_data

    # Advance past second event start
    await fake_schedule.fire_until(
        datetime.datetime.fromisoformat("2022-04-19 11:20:00+00:00")
    )
    assert calls_data() == first_start_call_data + second_start_call_data

    # Advance past first event end
    await fake_schedule.fire_until(
        datetime.datetime.fromisoformat("2022-04-19 11:40:00+00:00")
    )
    assert (
        calls_data()
        == first_start_call_data + second_start_call_data + first_end_call_data
    )

    # Advance past second event end
    await fake_schedule.fire_until(
        datetime.datetime.fromisoformat("2022-04-19 11:50:00+00:00")
    )
    assert (
        calls_data()
        == first_start_call_data
        + second_start_call_data
        + first_end_call_data
        + second_end_call_data
    )

    # Disable automations to clean up lingering timers
    await hass.services.async_call(
        automation.DOMAIN,
        SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: "automation.start_trigger"},
        blocking=True,
    )
    await hass.services.async_call(
        automation.DOMAIN,
        SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: "automation.end_trigger"},
        blocking=True,
    )
@@ -50,7 +50,7 @@ async def test_user_flow_success(
    # Submit SMS code
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "0123456"},
        {CONF_SMS_CODE: 123456},
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
@@ -107,7 +107,7 @@ async def test_user_flow_request_sms_code_errors(

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "0123456"},
        {CONF_SMS_CODE: 123456},
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
@@ -142,7 +142,7 @@ async def test_user_flow_verify_phone_number_errors(

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "999999"},
        {CONF_SMS_CODE: 999999},
    )

    assert result["type"] is FlowResultType.FORM
@@ -153,7 +153,7 @@ async def test_user_flow_verify_phone_number_errors(
    mock_auth_client.verify_phone_number.side_effect = None
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "0123456"},
        {CONF_SMS_CODE: 123456},
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
@@ -246,7 +246,7 @@ async def test_reauth_reconfigure_flow(
    # Submit SMS code
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "0123456"},
        {CONF_SMS_CODE: 123456},
    )

    assert result["type"] is FlowResultType.ABORT
@@ -311,7 +311,7 @@ async def test_reauth_reconfigure_flow_invalid_phone_number(

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "0123456"},
        {CONF_SMS_CODE: 123456},
    )

    assert result["type"] is FlowResultType.ABORT
@@ -358,7 +358,7 @@ async def test_reauth_reconfigure_flow_invalid_sms_code(

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "999999"},
        {CONF_SMS_CODE: 999999},
    )

    assert result["type"] is FlowResultType.FORM
@@ -369,7 +369,7 @@ async def test_reauth_reconfigure_flow_invalid_sms_code(
    mock_auth_client.verify_phone_number.side_effect = None
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "0123456"},
        {CONF_SMS_CODE: 123456},
    )

    assert result["type"] is FlowResultType.ABORT
@@ -436,7 +436,7 @@ async def test_reauth_reconfigure_flow_invalid_user_id(

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_SMS_CODE: "0123456"},
        {CONF_SMS_CODE: 123456},
    )

    assert result["type"] is FlowResultType.ABORT
@@ -1,11 +1,10 @@
"""Common fixtures for the Hikvision tests."""

from collections.abc import AsyncGenerator, Generator
from unittest.mock import MagicMock, patch
from collections.abc import Generator
from unittest.mock import AsyncMock, MagicMock, patch

import pytest

from homeassistant.components.hikvision import PLATFORMS
from homeassistant.components.hikvision.const import DOMAIN
from homeassistant.const import (
    CONF_HOST,
@@ -13,7 +12,6 @@ from homeassistant.const import (
    CONF_PORT,
    CONF_SSL,
    CONF_USERNAME,
    Platform,
)

from tests.common import MockConfigEntry
@@ -27,20 +25,7 @@ TEST_DEVICE_NAME = "Front Camera"


@pytest.fixture
def platforms() -> list[Platform]:
    """Platforms, which should be loaded during the test."""
    return PLATFORMS


@pytest.fixture(autouse=True)
async def mock_patch_platforms(platforms: list[Platform]) -> AsyncGenerator[None]:
    """Fixture to set up platforms for tests."""
    with patch(f"homeassistant.components.{DOMAIN}.PLATFORMS", platforms):
        yield


@pytest.fixture
def mock_setup_entry() -> Generator[MagicMock]:
def mock_setup_entry() -> Generator[AsyncMock]:
    """Override async_setup_entry."""
    with patch(
        "homeassistant.components.hikvision.async_setup_entry", return_value=True
@@ -73,6 +58,7 @@ def mock_hikcamera() -> Generator[MagicMock]:
    with (
        patch(
            "homeassistant.components.hikvision.HikCamera",
            autospec=True,
        ) as hikcamera_mock,
        patch(
            "homeassistant.components.hikvision.config_flow.HikCamera",
@@ -94,15 +80,6 @@ def mock_hikcamera() -> Generator[MagicMock]:
            "2024-01-01T00:00:00Z",
        )
        camera.get_event_triggers.return_value = {}

        # pyHik 0.4.0 methods
        camera.get_channels.return_value = [1]
        camera.get_snapshot.return_value = b"fake_image_data"
        camera.get_stream_url.return_value = (
            f"rtsp://{TEST_USERNAME}:{TEST_PASSWORD}"
            f"@{TEST_HOST}:554/Streaming/Channels/1"
        )

        yield hikcamera_mock
@@ -1,154 +0,0 @@
# serializer version: 1
# name: test_all_entities[camera.front_camera-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'camera',
    'entity_category': None,
    'entity_id': 'camera.front_camera',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': None,
    'platform': 'hikvision',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': <CameraEntityFeature: 2>,
    'translation_key': None,
    'unique_id': 'DS-2CD2142FWD-I20170101AAAA_1',
    'unit_of_measurement': None,
  })
# ---
# name: test_all_entities[camera.front_camera-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'access_token': '1caab5c3b3',
      'entity_picture': '/api/camera_proxy/camera.front_camera?token=1caab5c3b3',
      'friendly_name': 'Front Camera',
      'supported_features': <CameraEntityFeature: 2>,
    }),
    'context': <ANY>,
    'entity_id': 'camera.front_camera',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'idle',
  })
# ---
# name: test_nvr_entities[camera.front_camera_channel_1-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'camera',
    'entity_category': None,
    'entity_id': 'camera.front_camera_channel_1',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': None,
    'platform': 'hikvision',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': <CameraEntityFeature: 2>,
    'translation_key': None,
    'unique_id': 'DS-2CD2142FWD-I20170101AAAA_1',
    'unit_of_measurement': None,
  })
# ---
# name: test_nvr_entities[camera.front_camera_channel_1-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'access_token': '1caab5c3b3',
      'entity_picture': '/api/camera_proxy/camera.front_camera_channel_1?token=1caab5c3b3',
      'friendly_name': 'Front Camera Channel 1',
      'supported_features': <CameraEntityFeature: 2>,
    }),
    'context': <ANY>,
    'entity_id': 'camera.front_camera_channel_1',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'idle',
  })
# ---
# name: test_nvr_entities[camera.front_camera_channel_2-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'camera',
    'entity_category': None,
    'entity_id': 'camera.front_camera_channel_2',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': None,
    'platform': 'hikvision',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': <CameraEntityFeature: 2>,
    'translation_key': None,
    'unique_id': 'DS-2CD2142FWD-I20170101AAAA_2',
    'unit_of_measurement': None,
  })
# ---
# name: test_nvr_entities[camera.front_camera_channel_2-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'access_token': '1caab5c3b3',
      'entity_picture': '/api/camera_proxy/camera.front_camera_channel_2?token=1caab5c3b3',
      'friendly_name': 'Front Camera Channel 2',
      'supported_features': <CameraEntityFeature: 2>,
    }),
    'context': <ANY>,
    'entity_id': 'camera.front_camera_channel_2',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'idle',
  })
# ---
@@ -17,7 +17,6 @@ from homeassistant.const import (
    CONF_SSL,
    CONF_USERNAME,
    STATE_OFF,
    Platform,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.helpers import (
@@ -40,12 +39,6 @@ from .conftest import (
from tests.common import MockConfigEntry, snapshot_platform


@pytest.fixture
def platforms() -> list[Platform]:
    """Platforms, which should be loaded during the test."""
    return [Platform.BINARY_SENSOR]


@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_all_entities(
    hass: HomeAssistant,
@@ -139,11 +132,11 @@ async def test_binary_sensor_nvr_device(

    await setup_integration(hass, mock_config_entry)

    # NVR sensors are on per-channel devices
    state = hass.states.get("binary_sensor.front_camera_channel_1_motion")
    # NVR sensors should include channel number in name
    state = hass.states.get("binary_sensor.front_camera_motion_1")
    assert state is not None

    state = hass.states.get("binary_sensor.front_camera_channel_2_motion")
    state = hass.states.get("binary_sensor.front_camera_motion_2")
    assert state is not None
@@ -1,165 +0,0 @@
"""Test Hikvision cameras."""

from unittest.mock import MagicMock, patch

import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.camera import async_get_image, async_get_stream_source
from homeassistant.components.hikvision.const import DOMAIN
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er

from . import setup_integration
from .conftest import TEST_DEVICE_ID, TEST_DEVICE_NAME, TEST_HOST, TEST_PASSWORD

from tests.common import MockConfigEntry, snapshot_platform


@pytest.fixture
def platforms() -> list[Platform]:
    """Return platforms to load during test."""
    return [Platform.CAMERA]


@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_all_entities(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_hikcamera: MagicMock,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test all camera entities."""
    with patch("random.SystemRandom.getrandbits", return_value=123123123123):
        await setup_integration(hass, mock_config_entry)

    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)


@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_nvr_entities(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_hikcamera: MagicMock,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test NVR camera entities with multiple channels."""
    mock_hikcamera.return_value.get_type = "NVR"
    mock_hikcamera.return_value.get_channels.return_value = [1, 2]

    with patch("random.SystemRandom.getrandbits", return_value=123123123123):
        await setup_integration(hass, mock_config_entry)

    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)


async def test_camera_device_info(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_hikcamera: MagicMock,
    device_registry: dr.DeviceRegistry,
) -> None:
    """Test camera is linked to device."""
    await setup_integration(hass, mock_config_entry)

    device_entry = device_registry.async_get_device(
        identifiers={(DOMAIN, TEST_DEVICE_ID)}
    )
    assert device_entry is not None
    assert device_entry.name == TEST_DEVICE_NAME
    assert device_entry.manufacturer == "Hikvision"
    assert device_entry.model == "Camera"


async def test_camera_no_channels_creates_single_camera(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_hikcamera: MagicMock,
) -> None:
    """Test camera created when device returns no channels."""
    mock_hikcamera.return_value.get_channels.return_value = []

    await setup_integration(hass, mock_config_entry)

    # Single camera should be created for channel 1
    states = hass.states.async_entity_ids("camera")
    assert len(states) == 1

    state = hass.states.get("camera.front_camera")
    assert state is not None


async def test_camera_image(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_hikcamera: MagicMock,
) -> None:
    """Test getting camera image."""
    await setup_integration(hass, mock_config_entry)

    image = await async_get_image(hass, "camera.front_camera")
    assert image.content == b"fake_image_data"

    # Verify get_snapshot was called with channel 1
    mock_hikcamera.return_value.get_snapshot.assert_called_with(1)


async def test_camera_image_error(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_hikcamera: MagicMock,
) -> None:
    """Test camera image error handling."""
    mock_hikcamera.return_value.get_snapshot.side_effect = Exception("Connection error")

    await setup_integration(hass, mock_config_entry)

    with pytest.raises(HomeAssistantError, match="Error getting image"):
        await async_get_image(hass, "camera.front_camera")


async def test_camera_stream_source(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_hikcamera: MagicMock,
) -> None:
    """Test camera stream source URL."""
    await setup_integration(hass, mock_config_entry)

    stream_url = await async_get_stream_source(hass, "camera.front_camera")

    # Verify RTSP URL from library
    assert stream_url is not None
    assert stream_url.startswith("rtsp://")
    assert f"@{TEST_HOST}:554/Streaming/Channels/1" in stream_url

    # Verify get_stream_url was called with channel 1
    mock_hikcamera.return_value.get_stream_url.assert_called_with(1)


async def test_camera_stream_source_nvr(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_hikcamera: MagicMock,
) -> None:
    """Test NVR camera stream source URL."""
    mock_hikcamera.return_value.get_type = "NVR"
    mock_hikcamera.return_value.get_channels.return_value = [2]
    mock_hikcamera.return_value.get_stream_url.return_value = (
        f"rtsp://admin:{TEST_PASSWORD}@{TEST_HOST}:554/Streaming/Channels/201"
    )

    await setup_integration(hass, mock_config_entry)

    stream_url = await async_get_stream_source(hass, "camera.front_camera_channel_2")

    # NVR channel 2 should use stream channel 201
    assert stream_url is not None
    assert f"@{TEST_HOST}:554/Streaming/Channels/201" in stream_url

    # Verify get_stream_url was called with channel 2
    mock_hikcamera.return_value.get_stream_url.assert_called_with(2)

@@ -165,8 +165,8 @@ async def test_light_state_condition_behavior_any(
    for state in states:
        for eid in target_switches:
            set_or_remove_state(hass, eid, state["included"])
        await hass.async_block_till_done()
        assert not await has_single_call_after_trigger(hass, service_calls)
        await hass.async_block_till_done()
        assert not await has_single_call_after_trigger(hass, service_calls)

    for state in states:
        included_state = state["included"]

@@ -1,6 +1,6 @@
"""Test the MELCloud ATW zone sensor."""

from unittest.mock import MagicMock, patch
from unittest.mock import patch

import pytest

@@ -8,45 +8,32 @@ from homeassistant.components.melcloud.sensor import ATW_ZONE_SENSORS, AtwZoneSe


@pytest.fixture
def mock_coordinator():
    """Mock MELCloud coordinator."""
    with patch(
        "homeassistant.components.melcloud.coordinator.MelCloudDeviceUpdateCoordinator"
    ) as mock:
        yield mock


@pytest.fixture
def mock_device(mock_coordinator):
def mock_device():
    """Mock MELCloud device."""
    mock = MagicMock()
    mock.name = "name"
    mock.device.serial = 1234
    mock.device.mac = "11:11:11:11:11:11"
    mock.zone_device_info.return_value = {}
    mock.coordinator = mock_coordinator
    return mock
    with patch("homeassistant.components.melcloud.MelCloudDevice") as mock:
        mock.name = "name"
        mock.device.serial = 1234
        mock.device.mac = "11:11:11:11:11:11"
        yield mock


@pytest.fixture
def mock_zone_1():
    """Mock zone 1."""
    mock = MagicMock()
    mock.zone_index = 1
    return mock
    with patch("pymelcloud.atw_device.Zone") as mock:
        mock.zone_index = 1
        yield mock


@pytest.fixture
def mock_zone_2():
    """Mock zone 2."""
    mock = MagicMock()
    mock.zone_index = 2
    return mock
    with patch("pymelcloud.atw_device.Zone") as mock:
        mock.zone_index = 2
        yield mock


def test_zone_unique_ids(
    mock_coordinator, mock_device, mock_zone_1, mock_zone_2
) -> None:
def test_zone_unique_ids(mock_device, mock_zone_1, mock_zone_2) -> None:
    """Test unique id generation correctness."""
    sensor_1 = AtwZoneSensor(
        mock_device,

@@ -75,11 +75,7 @@ async def test_form(hass: HomeAssistant, mock_login, mock_get_devices) -> None:

@pytest.mark.parametrize(
    ("error", "reason"),
    [
        (ClientError(), "cannot_connect"),
        (TimeoutError(), "cannot_connect"),
        (AttributeError(), "invalid_auth"),
    ],
    [(ClientError(), "cannot_connect"), (TimeoutError(), "cannot_connect")],
)
async def test_form_errors(
    hass: HomeAssistant, mock_login, mock_get_devices, error, reason

@@ -1,52 +0,0 @@
"""Common fixtures for the Namecheap DynamicDNS tests."""

from collections.abc import Generator
from unittest.mock import AsyncMock, patch

import pytest

from homeassistant.components.namecheapdns.const import DOMAIN
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_PASSWORD

from tests.common import MockConfigEntry

TEST_HOST = "home"
TEST_DOMAIN = "example.com"
TEST_PASSWORD = "test-password"

TEST_USER_INPUT = {
    CONF_HOST: TEST_HOST,
    CONF_DOMAIN: TEST_DOMAIN,
    CONF_PASSWORD: TEST_PASSWORD,
}


@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
    """Override async_setup_entry."""
    with patch(
        "homeassistant.components.namecheapdns.async_setup_entry", return_value=True
    ) as mock_setup_entry:
        yield mock_setup_entry


@pytest.fixture(name="mock_namecheap")
def mock_update_namecheapdns() -> Generator[AsyncMock]:
    """Mock update_namecheapdns."""

    with patch(
        "homeassistant.components.namecheapdns.config_flow.update_namecheapdns",
        return_value=True,
    ) as mock:
        yield mock


@pytest.fixture(name="config_entry")
def mock_config_entry() -> MockConfigEntry:
    """Mock Namecheap Dynamic DNS configuration entry."""
    return MockConfigEntry(
        domain=DOMAIN,
        title=f"{TEST_HOST}.{TEST_DOMAIN}",
        data=TEST_USER_INPUT,
        entry_id="12345",
    )
@@ -1,142 +0,0 @@
"""Test the Namecheap DynamicDNS config flow."""

from unittest.mock import AsyncMock

from aiohttp import ClientError
import pytest

from homeassistant.components.namecheapdns.const import DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import issue_registry as ir
from homeassistant.setup import async_setup_component

from .conftest import TEST_USER_INPUT


@pytest.mark.usefixtures("mock_namecheap")
async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None:
    """Test we get the form."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {}

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], TEST_USER_INPUT
    )
    await hass.async_block_till_done()

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "home.example.com"
    assert result["data"] == TEST_USER_INPUT

    assert len(mock_setup_entry.mock_calls) == 1


@pytest.mark.parametrize(
    ("side_effect", "text_error"),
    [
        (ValueError, "unknown"),
        (False, "update_failed"),
        (ClientError, "cannot_connect"),
    ],
)
async def test_form_errors(
    hass: HomeAssistant,
    mock_setup_entry: AsyncMock,
    mock_namecheap: AsyncMock,
    side_effect: Exception | bool,
    text_error: str,
) -> None:
    """Test we handle errors."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    mock_namecheap.side_effect = [side_effect]
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], TEST_USER_INPUT
    )

    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {"base": text_error}

    mock_namecheap.side_effect = None

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], TEST_USER_INPUT
    )
    await hass.async_block_till_done()

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "home.example.com"
    assert result["data"] == TEST_USER_INPUT
    assert len(mock_setup_entry.mock_calls) == 1


@pytest.mark.usefixtures("mock_namecheap")
async def test_import(
    hass: HomeAssistant,
    mock_setup_entry: AsyncMock,
    issue_registry: ir.IssueRegistry,
) -> None:
    """Test import flow."""

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=TEST_USER_INPUT,
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "home.example.com"
    assert result["data"] == TEST_USER_INPUT
    assert len(mock_setup_entry.mock_calls) == 1
    assert issue_registry.async_get_issue(
        domain=HOMEASSISTANT_DOMAIN,
        issue_id=f"deprecated_yaml_{DOMAIN}",
    )


async def test_import_exception(
    hass: HomeAssistant,
    mock_setup_entry: AsyncMock,
    issue_registry: ir.IssueRegistry,
    mock_namecheap: AsyncMock,
) -> None:
    """Test import flow failed."""
    mock_namecheap.side_effect = [False]
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=TEST_USER_INPUT,
    )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "update_failed"

    assert len(mock_setup_entry.mock_calls) == 0

    assert issue_registry.async_get_issue(
        domain=DOMAIN,
        issue_id="deprecated_yaml_import_issue_error",
    )


@pytest.mark.usefixtures("mock_namecheap")
async def test_init_import_flow(
    hass: HomeAssistant,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test yaml triggers import flow."""

    await async_setup_component(
        hass,
        DOMAIN,
        {DOMAIN: TEST_USER_INPUT},
    )
    assert len(mock_setup_entry.mock_calls) == 1
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
@@ -2,79 +2,74 @@

from datetime import timedelta

from aiohttp import ClientError
from freezegun.api import FrozenDateTimeFactory
import pytest

from homeassistant.components.namecheapdns.const import UPDATE_URL
from homeassistant.config_entries import ConfigEntryState
from homeassistant.components import namecheapdns
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow

from .conftest import TEST_USER_INPUT

from tests.common import MockConfigEntry, async_fire_time_changed
from tests.common import async_fire_time_changed
from tests.test_util.aiohttp import AiohttpClientMocker

HOST = "test"
DOMAIN = "bla"
PASSWORD = "abcdefgh"

@pytest.mark.freeze_time
async def test_setup(
    hass: HomeAssistant,
    aioclient_mock: AiohttpClientMocker,
    config_entry: MockConfigEntry,
    freezer: FrozenDateTimeFactory,

@pytest.fixture
async def setup_namecheapdns(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup works if update passes."""
    """Fixture that sets up NamecheapDNS."""
    aioclient_mock.get(
        UPDATE_URL,
        params=TEST_USER_INPUT,
        namecheapdns.UPDATE_URL,
        params={"host": HOST, "domain": DOMAIN, "password": PASSWORD},
        text="<interface-response><ErrCount>0</ErrCount></interface-response>",
    )

    config_entry.add_to_hass(hass)
    assert await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()
    await async_setup_component(
        hass,
        namecheapdns.DOMAIN,
        {"namecheapdns": {"host": HOST, "domain": DOMAIN, "password": PASSWORD}},
    )

    assert config_entry.state is ConfigEntryState.LOADED

async def test_setup(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> None:
    """Test setup works if update passes."""
    aioclient_mock.get(
        namecheapdns.UPDATE_URL,
        params={"host": HOST, "domain": DOMAIN, "password": PASSWORD},
        text="<interface-response><ErrCount>0</ErrCount></interface-response>",
    )

    result = await async_setup_component(
        hass,
        namecheapdns.DOMAIN,
        {"namecheapdns": {"host": HOST, "domain": DOMAIN, "password": PASSWORD}},
    )
    assert result
    assert aioclient_mock.call_count == 1

    freezer.tick(timedelta(minutes=5))
    async_fire_time_changed(hass)
    async_fire_time_changed(hass, utcnow() + timedelta(minutes=5))
    await hass.async_block_till_done()
    assert aioclient_mock.call_count == 2


@pytest.mark.freeze_time
async def test_setup_fails_if_update_fails(
    hass: HomeAssistant,
    aioclient_mock: AiohttpClientMocker,
    config_entry: MockConfigEntry,
    freezer: FrozenDateTimeFactory,
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test setup fails if first update fails."""
    aioclient_mock.get(
        UPDATE_URL,
        params=TEST_USER_INPUT,
        namecheapdns.UPDATE_URL,
        params={"host": HOST, "domain": DOMAIN, "password": PASSWORD},
        text="<interface-response><ErrCount>1</ErrCount></interface-response>",
    )

    config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    assert config_entry.state is ConfigEntryState.SETUP_RETRY

    assert aioclient_mock.call_count == 1

    aioclient_mock.clear_requests()
    aioclient_mock.get(
        UPDATE_URL,
        params=TEST_USER_INPUT,
        exc=ClientError,
    result = await async_setup_component(
        hass,
        namecheapdns.DOMAIN,
        {"namecheapdns": {"host": HOST, "domain": DOMAIN, "password": PASSWORD}},
    )

    freezer.tick(timedelta(minutes=5))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

    assert config_entry.state is ConfigEntryState.SETUP_RETRY
    assert not result
    assert aioclient_mock.call_count == 1

@@ -82,9 +82,6 @@
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
      'sensor.private': dict({
        'suggested_unit_of_measurement': <UnitOfTime.MINUTES: 'min'>,
      }),
    }),
    'original_device_class': <SensorDeviceClass.DURATION: 'duration'>,
    'original_icon': None,
@@ -301,9 +298,6 @@
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
      'sensor.private': dict({
        'suggested_unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,
@@ -360,9 +354,6 @@
      'sensor': dict({
        'suggested_display_precision': 2,
      }),
      'sensor.private': dict({
        'suggested_unit_of_measurement': <UnitOfEnergy.KILO_WATT_HOUR: 'kWh'>,
      }),
    }),
    'original_device_class': <SensorDeviceClass.ENERGY: 'energy'>,
    'original_icon': None,

@@ -3,11 +3,11 @@
from ipaddress import ip_address
from unittest.mock import AsyncMock, MagicMock

from openevsehttp.exceptions import AuthenticationError, MissingSerial
from openevsehttp.exceptions import MissingSerial

from homeassistant.components.openevse.const import DOMAIN
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
@@ -22,17 +22,21 @@ async def test_user_flow(
) -> None:
    """Test user flow create entry with bad charger."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
        DOMAIN,
        context={"source": SOURCE_USER},
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {CONF_HOST: "10.0.0.131"}
        result["flow_id"],
        {CONF_HOST: "10.0.0.131"},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["data"] == {CONF_HOST: "10.0.0.131"}
    assert result["data"] == {
        CONF_HOST: "10.0.0.131",
    }
    assert result["result"].unique_id == "deadbeeffeed"


@@ -43,25 +47,30 @@ async def test_user_flow_flaky(
) -> None:
    """Test user flow create entry with flaky charger."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
        DOMAIN,
        context={"source": SOURCE_USER},
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "user"
    mock_charger.test_and_get.side_effect = TimeoutError
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {CONF_HOST: "10.0.0.131"}
        result["flow_id"],
        {CONF_HOST: "10.0.0.131"},
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "user"
    assert result["errors"] == {"base": "cannot_connect"}
    assert result["errors"] == {"host": "cannot_connect"}

    mock_charger.test_and_get.side_effect = None
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {CONF_HOST: "10.0.0.131"}
        result["flow_id"],
        {CONF_HOST: "10.0.0.131"},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["data"] == {CONF_HOST: "10.0.0.131"}
    assert result["data"] == {
        CONF_HOST: "10.0.0.131",
    }
    assert result["result"].unique_id == "deadbeeffeed"


@@ -74,67 +83,6 @@ async def test_user_flow_duplicate(
    """Test user flow aborts when config entry already exists."""
    mock_config_entry.add_to_hass(hass)

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {CONF_HOST: "192.168.1.100"}
    )
    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"


async def test_user_flow_no_serial(
    hass: HomeAssistant,
    mock_charger: MagicMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test user flow handles missing serial gracefully."""
    mock_charger.test_and_get.side_effect = [{}, MissingSerial]

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {CONF_HOST: "10.0.0.131"}
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["result"].unique_id is None


async def test_import_flow_no_serial(
    hass: HomeAssistant,
    mock_charger: MagicMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test import flow handles missing serial gracefully."""
    mock_charger.test_and_get.side_effect = [{}, MissingSerial]

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: "10.0.0.131"}
    )

    # Assert the flow continued to create the entry
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["result"].unique_id is None


async def test_user_flow_with_auth(
    hass: HomeAssistant,
    mock_charger: MagicMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test user flow create entry with authentication."""
    mock_charger.test_and_get.side_effect = [
        AuthenticationError,
        {"serial": "deadbeeffeed"},
    ]
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
@@ -142,90 +90,12 @@ async def test_user_flow_with_auth(
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {CONF_HOST: "10.0.0.131"}
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "auth"
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_USERNAME: "fakeuser", CONF_PASSWORD: "muchpassword"},
        {CONF_HOST: "192.168.1.100"},
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["data"] == {
        CONF_HOST: "10.0.0.131",
        CONF_USERNAME: "fakeuser",
        CONF_PASSWORD: "muchpassword",
    }
    assert result["result"].unique_id == "deadbeeffeed"


async def test_user_flow_with_auth_error(
    hass: HomeAssistant, mock_charger: MagicMock
) -> None:
    """Test user flow create entry with authentication error."""
    mock_charger.test_and_get.side_effect = [
        AuthenticationError,
        AuthenticationError,
        {},
    ]
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_HOST: "10.0.0.131"},
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "auth"
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_USERNAME: "fakeuser", CONF_PASSWORD: "muchpassword"},
    )

    assert result["type"] is FlowResultType.FORM
    assert result["errors"]["base"] == "invalid_auth"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_USERNAME: "fakeuser", CONF_PASSWORD: "muchpassword"},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY


async def test_user_flow_with_missing_serial(
    hass: HomeAssistant, mock_charger: MagicMock
) -> None:
    """Test user flow create entry with authentication error."""
    mock_charger.test_and_get.side_effect = [AuthenticationError, MissingSerial]
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "user"

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {CONF_HOST: "10.0.0.131"}
    )
    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "auth"
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_USERNAME: "fakeuser", CONF_PASSWORD: "muchpassword"},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["data"] == {
        CONF_HOST: "10.0.0.131",
        CONF_USERNAME: "fakeuser",
        CONF_PASSWORD: "muchpassword",
    }
    assert result["result"].unique_id is None
    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"


async def test_import_flow(
@@ -239,7 +109,9 @@ async def test_import_flow(
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["data"] == {CONF_HOST: "10.0.0.131"}
    assert result["data"] == {
        CONF_HOST: "10.0.0.131",
    }
    assert result["result"].unique_id == "deadbeeffeed"


@@ -371,94 +243,7 @@ async def test_zeroconf_connection_error(
    )

    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "unavailable_host"


async def test_zeroconf_auth(hass: HomeAssistant, mock_charger: MagicMock) -> None:
    """Test zeroconf discovery with connection failure."""
    mock_charger.test_and_get.side_effect = [AuthenticationError, {}]
    discovery_info = ZeroconfServiceInfo(
        ip_address=ip_address("192.168.1.123"),
        ip_addresses=[ip_address("192.168.1.123"), ip_address("2001:db8::1")],
        hostname="openevse-deadbeeffeed.local.",
        name="openevse-deadbeeffeed._openevse._tcp.local.",
        port=80,
        properties={"id": "deadbeeffeed", "type": "openevse"},
        type="_openevse._tcp.local.",
    )

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_ZEROCONF},
        data=discovery_info,
    )

    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "auth"
    assert not result["errors"]

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_USERNAME: "fakeuser", CONF_PASSWORD: "muchpassword"},
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["data"] == {
        CONF_HOST: "192.168.1.123",
        CONF_USERNAME: "fakeuser",
        CONF_PASSWORD: "muchpassword",
    }


async def test_zeroconf_auth_failure(
    hass: HomeAssistant, mock_charger: MagicMock
) -> None:
    """Test zeroconf discovery with connection failure."""
    mock_charger.test_and_get.side_effect = [
        AuthenticationError,
        AuthenticationError,
        {},
    ]
    discovery_info = ZeroconfServiceInfo(
        ip_address=ip_address("192.168.1.123"),
        ip_addresses=[ip_address("192.168.1.123"), ip_address("2001:db8::1")],
        hostname="openevse-deadbeeffeed.local.",
        name="openevse-deadbeeffeed._openevse._tcp.local.",
        port=80,
        properties={"id": "deadbeeffeed", "type": "openevse"},
        type="_openevse._tcp.local.",
    )

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_ZEROCONF},
        data=discovery_info,
    )

    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "auth"
    assert not result["errors"]

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_USERNAME: "fakeuser", CONF_PASSWORD: "muchpassword"},
    )

    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "auth"
    assert result["errors"] == {"base": "invalid_auth"}

    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_USERNAME: "fakeuser", CONF_PASSWORD: "muchpassword"},
    )

    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["data"] == {
        CONF_HOST: "192.168.1.123",
        CONF_USERNAME: "fakeuser",
        CONF_PASSWORD: "muchpassword",
    }
    assert result["reason"] == "cannot_connect"


async def test_zeroconf_already_configured_host(
@@ -486,3 +271,43 @@ async def test_zeroconf_already_configured_host(
    # Should abort because the host matches an existing entry
    assert result["type"] is FlowResultType.ABORT
    assert result["reason"] == "already_configured"


async def test_user_flow_no_serial(
    hass: HomeAssistant,
    mock_charger: MagicMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test user flow handles missing serial gracefully."""
    mock_charger.test_and_get.side_effect = [{}, MissingSerial]

    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_USER},
    )
    assert result["type"] is FlowResultType.FORM
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        {CONF_HOST: "10.0.0.131"},
    )
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["result"].unique_id is None


async def test_import_flow_no_serial(
    hass: HomeAssistant,
    mock_charger: MagicMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test import flow handles missing serial gracefully."""
    mock_charger.test_and_get.side_effect = [{}, MissingSerial]

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_IMPORT}, data={CONF_HOST: "10.0.0.131"}
    )

    # Assert the flow continued to create the entry
    assert result["type"] is FlowResultType.CREATE_ENTRY
    assert result["title"] == "OpenEVSE 10.0.0.131"
    assert result["result"].unique_id is None

@@ -44,7 +44,7 @@ async def test_disabled_by_default_entities(
    assert entry.disabled
    assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION

    state = hass.states.get("sensor.openevse_mock_config_rtc_temperature")
    state = hass.states.get("sensor.openevse_mock_config_temperature")
    assert state is None

    entry = entity_registry.async_get("sensor.openevse_mock_config_rtc_temperature")

@@ -13,7 +13,7 @@ from homeassistant.components.application_credentials import (
)
from homeassistant.components.recorder import Recorder
from homeassistant.components.tibber.const import AUTH_IMPLEMENTATION, DOMAIN
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component

@@ -26,77 +26,10 @@ def create_tibber_device(
    name: str = "Test Device",
    brand: str = "Tibber",
    model: str = "Gen1",
    value: float | None = 72.0,
    home_id: str = "home-id",
    state_of_charge: float | None = None,
    connector_status: str | None = None,
    charging_status: str | None = None,
    device_status: str | None = None,
) -> tibber.data_api.TibberDevice:
    """Create a fake Tibber Data API device.

    Args:
        device_id: Device ID.
        external_id: External device ID.
        name: Device name.
        brand: Device brand.
        model: Device model.
        home_id: Home ID.
        state_of_charge: Battery state of charge (for regular sensors).
        connector_status: Connector status (for binary sensors).
        charging_status: Charging status (for binary sensors).
        device_status: Device on/off status (for binary sensors).
    """
    capabilities = []

    # Add regular sensor capabilities
    if state_of_charge is not None:
        capabilities.append(
            {
                "id": "storage.stateOfCharge",
                "value": state_of_charge,
                "description": "State of charge",
                "unit": "%",
            }
        )
    capabilities.append(
        {
            "id": "unknown.sensor.id",
            "value": None,
            "description": "Unknown",
            "unit": "",
        }
    )

    if connector_status is not None:
        capabilities.append(
            {
                "id": "connector.status",
                "value": connector_status,
                "description": "Connector status",
                "unit": "",
            }
        )

    if charging_status is not None:
        capabilities.append(
            {
                "id": "charging.status",
                "value": charging_status,
                "description": "Charging status",
                "unit": "",
            }
        )

    if device_status is not None:
        capabilities.append(
            {
                "id": "onOff",
                "value": device_status,
                "description": "Device status",
                "unit": "",
            }
        )

    """Create a fake Tibber Data API device."""
    device_data = {
        "id": device_id,
        "externalId": external_id,
@@ -105,7 +38,20 @@ def create_tibber_device(
            "brand": brand,
            "model": model,
        },
        "capabilities": capabilities,
        "capabilities": [
            {
                "id": "storage.stateOfCharge",
                "value": value,
                "description": "State of charge",
                "unit": "%",
            },
            {
                "id": "unknown.sensor.id",
                "value": None,
                "description": "Unknown",
                "unit": "",
            },
        ],
    }
    return tibber.data_api.TibberDevice(device_data, home_id=home_id)

@@ -198,16 +144,3 @@ async def setup_credentials(hass: HomeAssistant) -> None:
        ClientCredential("test-client-id", "test-client-secret"),
        DOMAIN,
    )


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return [Platform.BINARY_SENSOR, Platform.NOTIFY, Platform.SENSOR]


@pytest.fixture(autouse=True)
async def mock_patch_platforms(platforms: list[Platform]) -> AsyncGenerator[None]:
    """Fixture to set up platforms for tests."""
    with patch(f"homeassistant.components.{DOMAIN}.PLATFORMS", platforms):
        yield

@@ -1,148 +0,0 @@
# serializer version: 1
# name: test_binary_sensor_snapshot[binary_sensor.test_device_charging-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': None,
    'entity_id': 'binary_sensor.test_device_charging',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <BinarySensorDeviceClass.BATTERY_CHARGING: 'battery_charging'>,
    'original_icon': None,
    'original_name': 'Charging',
    'platform': 'tibber',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': 'device-id_charging.status',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensor_snapshot[binary_sensor.test_device_charging-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'battery_charging',
      'friendly_name': 'Test Device Charging',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.test_device_charging',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'on',
  })
# ---
# name: test_binary_sensor_snapshot[binary_sensor.test_device_plug-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': None,
    'entity_id': 'binary_sensor.test_device_plug',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <BinarySensorDeviceClass.PLUG: 'plug'>,
    'original_icon': None,
    'original_name': 'Plug',
    'platform': 'tibber',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': 'device-id_connector.status',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensor_snapshot[binary_sensor.test_device_plug-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'plug',
      'friendly_name': 'Test Device Plug',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.test_device_plug',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'on',
  })
# ---
# name: test_binary_sensor_snapshot[binary_sensor.test_device_power-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': None,
    'entity_id': 'binary_sensor.test_device_power',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <BinarySensorDeviceClass.POWER: 'power'>,
    'original_icon': None,
    'original_name': 'Power',
    'platform': 'tibber',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': 'device-id_onOff',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensor_snapshot[binary_sensor.test_device_power-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'device_class': 'power',
      'friendly_name': 'Test Device Power',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.test_device_power',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'on',
  })
# ---
@@ -1,94 +0,0 @@
"""Tests for the Tibber binary sensors."""

from __future__ import annotations

from unittest.mock import AsyncMock

import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.recorder import Recorder
from homeassistant.const import STATE_OFF, STATE_ON, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import create_tibber_device

from tests.common import MockConfigEntry, snapshot_platform


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return [Platform.BINARY_SENSOR]


async def test_binary_sensor_snapshot(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    data_api_client_mock: AsyncMock,
    setup_credentials: None,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test binary sensor entities against snapshot."""
    device = create_tibber_device(
        connector_status="connected",
        charging_status="charging",
        device_status="on",
    )
    data_api_client_mock.get_all_devices = AsyncMock(return_value={"device-id": device})
    data_api_client_mock.update_devices = AsyncMock(return_value={"device-id": device})

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)


@pytest.mark.parametrize(
    (
        "entity_suffix",
        "connector_status",
        "charging_status",
        "device_status",
        "expected_state",
    ),
    [
        ("plug", "connected", None, None, STATE_ON),
        ("plug", "disconnected", None, None, STATE_OFF),
        ("charging", None, "charging", None, STATE_ON),
        ("charging", None, "idle", None, STATE_OFF),
        ("power", None, None, "on", STATE_ON),
        ("power", None, None, "off", STATE_OFF),
    ],
)
async def test_binary_sensor_states(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    data_api_client_mock: AsyncMock,
    setup_credentials: None,
    entity_suffix: str,
    connector_status: str | None,
    charging_status: str | None,
    device_status: str | None,
    expected_state: str,
) -> None:
    """Test binary sensor state values."""
    device = create_tibber_device(
        connector_status=connector_status,
        charging_status=charging_status,
        device_status=device_status,
    )
    data_api_client_mock.get_all_devices = AsyncMock(return_value={"device-id": device})
    data_api_client_mock.update_devices = AsyncMock(return_value={"device-id": device})

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    entity_id = f"binary_sensor.test_device_{entity_suffix}"
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.state == expected_state
@@ -24,10 +24,10 @@ async def test_data_api_sensors_are_created(
) -> None:
    """Ensure Data API sensors are created and expose values from the coordinator."""
    data_api_client_mock.get_all_devices = AsyncMock(
        return_value={"device-id": create_tibber_device(state_of_charge=72.0)}
        return_value={"device-id": create_tibber_device(value=72.0)}
    )
    data_api_client_mock.update_devices = AsyncMock(
        return_value={"device-id": create_tibber_device(state_of_charge=83.0)}
        return_value={"device-id": create_tibber_device(value=83.0)}
    )

    await hass.config_entries.async_setup(config_entry.entry_id)

Some files were not shown because too many files have changed in this diff