Mirror of https://github.com/home-assistant/core.git (synced 2026-01-23 08:07:00 +01:00)

Compare commits: 2026.1.0b3...rc (88 commits)
| Author | SHA1 | Date |
|---|---|---|
| | c5b72ac286 | |
| | 5f6dce5503 | |
| | 6dd8692bb8 | |
| | 16f4849e88 | |
| | 04809e47f1 | |
| | 691cf67f68 | |
| | c2e1646473 | |
| | 9ac5560c41 | |
| | 68cbdcf3c9 | |
| | 6474a1bf63 | |
| | 572092d362 | |
| | 01ea5a1634 | |
| | b202c8b43e | |
| | 79fd98753a | |
| | 447da083c0 | |
| | 0643b36ed5 | |
| | 4ee3ac16af | |
| | 6824f38c68 | |
| | e238d67818 | |
| | 992a9bdd3b | |
| | ceaae1c1cc | |
| | 1c163c92dc | |
| | a42aa9372c | |
| | 013592bd54 | |
| | 2101bae095 | |
| | cfa1107135 | |
| | a269ef660a | |
| | c43c4f17e9 | |
| | de25e6af51 | |
| | 18d3629b6c | |
| | 50c477a408 | |
| | ea9cd7d905 | |
| | 2bf4ac20ea | |
| | 94ff881897 | |
| | 2975b3c1b9 | |
| | 0143c4ff85 | |
| | f59566d20b | |
| | 395f0ad2a7 | |
| | 2af1fc6759 | |
| | c1e7122d1c | |
| | e5624b1224 | |
| | 6e380bafca | |
| | bb9fd94430 | |
| | 07bc5d5c6b | |
| | 651b7116dd | |
| | 34438bd039 | |
| | 7b53b8691c | |
| | 8748d6f200 | |
| | 8d95511650 | |
| | 9aa5953a86 | |
| | 5ccdfda747 | |
| | 00ad44cb91 | |
| | b7519cd880 | |
| | ac44769539 | |
| | 9e95b80805 | |
| | 50086ca5c7 | |
| | 1f28fe9933 | |
| | 4465aa264c | |
| | 2c1bc96161 | |
| | 7127159a5b | |
| | 9f0eb6f077 | |
| | da19cc06e3 | |
| | fd92377cf2 | |
| | c201938b8b | |
| | b3765204b1 | |
| | 786257e051 | |
| | 9559634151 | |
| | cf12ed8f08 | |
| | e213f49c75 | |
| | 09c7cc113a | |
| | e1e7e039a9 | |
| | 05a0f0d23f | |
| | d3853019eb | |
| | ccbaac55b3 | |
| | 771292ced9 | |
| | 5d4262e8b3 | |
| | d96da9a639 | |
| | 288a805d0f | |
| | 8e55ceea77 | |
| | 14f1d9fbad | |
| | eb6582bc24 | |
| | 4afe67f33d | |
| | 5d7b10f569 | |
| | 340c2e48df | |
| | 86257b1865 | |
| | eea1adccfd | |
| | 242be14f88 | |
| | 7e013b723d | |
@@ -85,6 +85,22 @@ class AirzoneSystemEntity(AirzoneEntity):
        value = system[key]
        return value

+    async def _async_update_sys_params(self, params: dict[str, Any]) -> None:
+        """Send system parameters to API."""
+        _params = {
+            API_SYSTEM_ID: self.system_id,
+            **params,
+        }
+        _LOGGER.debug("update_sys_params=%s", _params)
+        try:
+            await self.coordinator.airzone.set_sys_parameters(_params)
+        except AirzoneError as error:
+            raise HomeAssistantError(
+                f"Failed to set system {self.entity_id}: {error}"
+            ) from error
+
+        self.coordinator.async_set_updated_data(self.coordinator.airzone.data())


class AirzoneHotWaterEntity(AirzoneEntity):
    """Define an Airzone Hot Water entity."""

@@ -12,5 +12,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aioairzone"],
-  "requirements": ["aioairzone==1.0.4"]
+  "requirements": ["aioairzone==1.0.5"]
}

@@ -20,6 +20,7 @@ from aioairzone.const import (
    AZD_MODES,
    AZD_Q_ADAPT,
    AZD_SLEEP,
+    AZD_SYSTEMS,
    AZD_ZONES,
)
@@ -30,7 +31,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AirzoneConfigEntry, AirzoneUpdateCoordinator
-from .entity import AirzoneEntity, AirzoneZoneEntity
+from .entity import AirzoneEntity, AirzoneSystemEntity, AirzoneZoneEntity


@dataclass(frozen=True, kw_only=True)
@@ -85,14 +86,7 @@ def main_zone_options(
    return [k for k, v in options.items() if v in modes]


-MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
-    AirzoneSelectDescription(
-        api_param=API_MODE,
-        key=AZD_MODE,
-        options_dict=MODE_DICT,
-        options_fn=main_zone_options,
-        translation_key="modes",
-    ),
+SYSTEM_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
+    AirzoneSelectDescription(
+        api_param=API_Q_ADAPT,
+        entity_category=EntityCategory.CONFIG,
@@ -104,6 +98,17 @@ MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
    )


+MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
+    AirzoneSelectDescription(
+        api_param=API_MODE,
+        key=AZD_MODE,
+        options_dict=MODE_DICT,
+        options_fn=main_zone_options,
+        translation_key="modes",
+    ),
+)


ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
    AirzoneSelectDescription(
        api_param=API_COLD_ANGLE,
@@ -140,16 +145,37 @@ async def async_setup_entry(
    """Add Airzone select from a config_entry."""
    coordinator = entry.runtime_data

+    added_systems: set[str] = set()
    added_zones: set[str] = set()

    def _async_entity_listener() -> None:
        """Handle additions of select."""

+        entities: list[AirzoneBaseSelect] = []
+
+        systems_data = coordinator.data.get(AZD_SYSTEMS, {})
+        received_systems = set(systems_data)
+        new_systems = received_systems - added_systems
+        if new_systems:
+            entities.extend(
+                AirzoneSystemSelect(
+                    coordinator,
+                    description,
+                    entry,
+                    system_id,
+                    systems_data.get(system_id),
+                )
+                for system_id in new_systems
+                for description in SYSTEM_SELECT_TYPES
+                if description.key in systems_data.get(system_id)
+            )
+            added_systems.update(new_systems)

        zones_data = coordinator.data.get(AZD_ZONES, {})
        received_zones = set(zones_data)
        new_zones = received_zones - added_zones
        if new_zones:
-            entities: list[AirzoneZoneSelect] = [
+            entities.extend(
                AirzoneZoneSelect(
                    coordinator,
                    description,
@@ -161,8 +187,8 @@ async def async_setup_entry(
                for description in MAIN_ZONE_SELECT_TYPES
                if description.key in zones_data.get(system_zone_id)
                and zones_data.get(system_zone_id).get(AZD_MASTER) is True
-            ]
-            entities += [
+            )
+            entities.extend(
                AirzoneZoneSelect(
                    coordinator,
                    description,
@@ -173,10 +199,11 @@ async def async_setup_entry(
                for system_zone_id in new_zones
                for description in ZONE_SELECT_TYPES
                if description.key in zones_data.get(system_zone_id)
-            ]
-            async_add_entities(entities)
+            )
            added_zones.update(new_zones)

        async_add_entities(entities)

    entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
    _async_entity_listener()

@@ -203,6 +230,38 @@ class AirzoneBaseSelect(AirzoneEntity, SelectEntity):
        self._attr_current_option = self._get_current_option()


+class AirzoneSystemSelect(AirzoneSystemEntity, AirzoneBaseSelect):
+    """Define an Airzone System select."""
+
+    def __init__(
+        self,
+        coordinator: AirzoneUpdateCoordinator,
+        description: AirzoneSelectDescription,
+        entry: ConfigEntry,
+        system_id: str,
+        system_data: dict[str, Any],
+    ) -> None:
+        """Initialize."""
+        super().__init__(coordinator, entry, system_data)
+
+        self._attr_unique_id = f"{self._attr_unique_id}_{system_id}_{description.key}"
+        self.entity_description = description
+
+        self._attr_options = self.entity_description.options_fn(
+            system_data, description.options_dict
+        )
+
+        self.values_dict = {v: k for k, v in description.options_dict.items()}
+
+        self._async_update_attrs()
+
+    async def async_select_option(self, option: str) -> None:
+        """Change the selected option."""
+        param = self.entity_description.api_param
+        value = self.entity_description.options_dict[option]
+        await self._async_update_sys_params({param: value})


class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect):
    """Define an Airzone Zone select."""
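The select platform above follows Home Assistant's usual coordinator-listener pattern for adding entities dynamically as devices appear. A minimal sketch of that pattern, with a hypothetical `MySelect` class and a coordinator whose `data` is assumed to be a dict keyed by device ID, standing in for the Airzone implementation:

```python
# Minimal sketch of the coordinator-listener pattern used above.
# MySelect and the shape of coordinator.data are hypothetical stand-ins.
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    coordinator = entry.runtime_data
    added_ids: set[str] = set()

    def _async_add_new_entities() -> None:
        # Only create entities for IDs the coordinator reports that we have
        # not added yet; repeated refreshes are then cheap no-ops.
        new_ids = set(coordinator.data) - added_ids
        if new_ids:
            async_add_entities(MySelect(coordinator, item_id) for item_id in new_ids)
            added_ids.update(new_ids)

    # Re-check on every coordinator refresh, and once immediately for the
    # devices that are already known at setup time.
    entry.async_on_unload(coordinator.async_add_listener(_async_add_new_entities))
    _async_add_new_entities()
```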
@@ -69,6 +69,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
+from homeassistant.helpers.json import json_dumps
from homeassistant.util import slugify

from . import AnthropicConfigEntry
@@ -193,7 +194,7 @@ def _convert_content(
        tool_result_block = ToolResultBlockParam(
            type="tool_result",
            tool_use_id=content.tool_call_id,
-            content=json.dumps(content.tool_result),
+            content=json_dumps(content.tool_result),
        )
        external_tool = False
        if not messages or messages[-1]["role"] != (

@@ -3,9 +3,8 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
import logging
-import math

-from pysilero_vad import SileroVoiceActivityDetector
+from pymicro_vad import MicroVad
from pyspeex_noise import AudioProcessor

from .const import BYTES_PER_CHUNK
@@ -43,8 +42,8 @@ class AudioEnhancer(ABC):
        """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""


-class SileroVadSpeexEnhancer(AudioEnhancer):
-    """Audio enhancer that runs Silero VAD and speex."""
+class MicroVadSpeexEnhancer(AudioEnhancer):
+    """Audio enhancer that runs microVAD and speex."""

    def __init__(
        self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
@@ -70,49 +69,21 @@ class SileroVadSpeexEnhancer(AudioEnhancer):
            self.noise_suppression,
        )

-        self.vad: SileroVoiceActivityDetector | None = None
-
-        # We get 10ms chunks but Silero works on 32ms chunks, so we have to
-        # buffer audio. The previous speech probability is used until enough
-        # audio has been buffered.
-        self._vad_buffer: bytearray | None = None
-        self._vad_buffer_chunks = 0
-        self._vad_buffer_chunk_idx = 0
-        self._last_speech_probability: float | None = None
+        self.vad: MicroVad | None = None

        if self.is_vad_enabled:
-            self.vad = SileroVoiceActivityDetector()
-
-            # VAD buffer is a multiple of 10ms, but Silero VAD needs 32ms.
-            self._vad_buffer_chunks = int(
-                math.ceil(self.vad.chunk_bytes() / BYTES_PER_CHUNK)
-            )
-            self._vad_leftover_bytes = self.vad.chunk_bytes() - BYTES_PER_CHUNK
-            self._vad_buffer = bytearray(self.vad.chunk_bytes())
-            _LOGGER.debug("Initialized Silero VAD")
+            self.vad = MicroVad()
+            _LOGGER.debug("Initialized microVAD")

    def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
        """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
+        speech_probability: float | None = None

        assert len(audio) == BYTES_PER_CHUNK

        if self.vad is not None:
            # Run VAD
-            assert self._vad_buffer is not None
-            start_idx = self._vad_buffer_chunk_idx * BYTES_PER_CHUNK
-            self._vad_buffer[start_idx : start_idx + BYTES_PER_CHUNK] = audio
-
-            self._vad_buffer_chunk_idx += 1
-            if self._vad_buffer_chunk_idx >= self._vad_buffer_chunks:
-                # We have enough data to run Silero VAD (32 ms)
-                self._last_speech_probability = self.vad.process_chunk(
-                    self._vad_buffer[: self.vad.chunk_bytes()]
-                )
-
-                # Copy leftover audio that wasn't processed to start
-                self._vad_buffer[: self._vad_leftover_bytes] = self._vad_buffer[
-                    -self._vad_leftover_bytes :
-                ]
-                self._vad_buffer_chunk_idx = 0
+            speech_probability = self.vad.Process10ms(audio)

        if self.audio_processor is not None:
            # Run noise suppression and auto gain
@@ -121,5 +92,5 @@ class SileroVadSpeexEnhancer(AudioEnhancer):
        return EnhancedAudioChunk(
            audio=audio,
            timestamp_ms=timestamp_ms,
-            speech_probability=self._last_speech_probability,
+            speech_probability=speech_probability,
        )

@@ -8,5 +8,5 @@
  "integration_type": "system",
  "iot_class": "local_push",
  "quality_scale": "internal",
-  "requirements": ["pysilero-vad==3.0.1", "pyspeex-noise==1.0.2"]
+  "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"]
}

@@ -55,7 +55,7 @@ from homeassistant.util import (
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.limited_size_dict import LimitedSizeDict

-from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, SileroVadSpeexEnhancer
+from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .const import (
    ACKNOWLEDGE_PATH,
    BYTES_PER_CHUNK,
@@ -633,7 +633,7 @@ class PipelineRun:
        # Initialize with audio settings
        if self.audio_settings.needs_processor and (self.audio_enhancer is None):
            # Default audio enhancer
-            self.audio_enhancer = SileroVadSpeexEnhancer(
+            self.audio_enhancer = MicroVadSpeexEnhancer(
                self.audio_settings.auto_gain_dbfs,
                self.audio_settings.noise_suppression_level,
                self.audio_settings.is_vad_enabled,
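For reference, `pymicro_vad` operates directly on 10 ms chunks, which is what removes the 32 ms buffering the Silero-based enhancer needed. A small sketch of that usage outside Home Assistant; the 320-byte chunk size is derived from the 16 kHz / 16-bit mono format rather than taken from the diff:

```python
# Feeding 10 ms chunks of 16 kHz, 16-bit mono PCM to pymicro_vad, as the new
# MicroVadSpeexEnhancer does. CHUNK_BYTES is derived from the audio format:
# 16 000 samples/s * 0.010 s * 2 bytes/sample = 320 bytes.
from pymicro_vad import MicroVad

CHUNK_BYTES = 320


def speech_probabilities(pcm: bytes) -> list[float]:
    """Return one speech probability per complete 10 ms chunk."""
    vad = MicroVad()
    return [
        vad.Process10ms(pcm[i : i + CHUNK_BYTES])
        for i in range(0, len(pcm) - CHUNK_BYTES + 1, CHUNK_BYTES)
    ]
```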
@@ -7,7 +7,7 @@ import asyncio
from collections.abc import Callable, Mapping
from dataclasses import dataclass
import logging
-from typing import Any, Protocol, cast
+from typing import Any, Literal, Protocol, cast

from propcache.api import cached_property
import voluptuous as vol
@@ -16,7 +16,10 @@ from homeassistant.components import labs, websocket_api
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
from homeassistant.components.labs import async_listen as async_labs_listen
from homeassistant.const import (
+    ATTR_AREA_ID,
    ATTR_ENTITY_ID,
+    ATTR_FLOOR_ID,
+    ATTR_LABEL_ID,
    ATTR_MODE,
    ATTR_NAME,
    CONF_ACTIONS,
@@ -30,6 +33,7 @@ from homeassistant.const import (
    CONF_OPTIONS,
    CONF_PATH,
    CONF_PLATFORM,
+    CONF_TARGET,
    CONF_TRIGGERS,
    CONF_VARIABLES,
    CONF_ZONE,
@@ -588,20 +592,32 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        """Return True if entity is on."""
        return self._async_detach_triggers is not None or self._is_enabled

-    @property
+    @cached_property
    def referenced_labels(self) -> set[str]:
        """Return a set of referenced labels."""
-        return self.action_script.referenced_labels
+        referenced = self.action_script.referenced_labels
+
+        for conf in self._trigger_config:
+            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
+        return referenced

-    @property
+    @cached_property
    def referenced_floors(self) -> set[str]:
        """Return a set of referenced floors."""
-        return self.action_script.referenced_floors
+        referenced = self.action_script.referenced_floors
+
+        for conf in self._trigger_config:
+            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
+        return referenced

-    @property
+    @cached_property
    def referenced_areas(self) -> set[str]:
        """Return a set of referenced areas."""
-        return self.action_script.referenced_areas
+        referenced = self.action_script.referenced_areas
+
+        for conf in self._trigger_config:
+            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
+        return referenced

    @property
    def referenced_blueprint(self) -> str | None:
@@ -1209,6 +1225,9 @@ def _trigger_extract_devices(trigger_conf: dict) -> list[str]:
    if trigger_conf[CONF_PLATFORM] == "tag" and CONF_DEVICE_ID in trigger_conf:
        return trigger_conf[CONF_DEVICE_ID]  # type: ignore[no-any-return]

+    if target_devices := _get_targets_from_trigger_config(trigger_conf, CONF_DEVICE_ID):
+        return target_devices
+
    return []


@@ -1239,9 +1258,28 @@ def _trigger_extract_entities(trigger_conf: dict) -> list[str]:
    ):
        return [trigger_conf[CONF_EVENT_DATA][CONF_ENTITY_ID]]

+    if target_entities := _get_targets_from_trigger_config(
+        trigger_conf, CONF_ENTITY_ID
+    ):
+        return target_entities
+
    return []


+@callback
+def _get_targets_from_trigger_config(
+    config: dict,
+    target: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
+) -> list[str]:
+    """Extract targets from a target config."""
+    if not (target_conf := config.get(CONF_TARGET)):
+        return []
+    if not (targets := target_conf.get(target)):
+        return []
+
+    return [targets] if isinstance(targets, str) else targets


@websocket_api.websocket_command({"type": "automation/config", "entity_id": str})
def websocket_config(
    hass: HomeAssistant,
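The new `_get_targets_from_trigger_config` helper is small enough to restate on its own. A standalone rendition with Home Assistant's constants replaced by plain strings, plus a usage check:

```python
# Standalone rendition of the helper added above, with the CONF_TARGET
# constant replaced by the literal "target" for illustration.
def get_targets_from_trigger_config(config: dict, target: str) -> list[str]:
    """Return the requested target IDs from a trigger's target block."""
    if not (target_conf := config.get("target")):
        return []
    if not (targets := target_conf.get(target)):
        return []
    # A single string is normalized to a one-element list.
    return [targets] if isinstance(targets, str) else targets


trigger = {"trigger": "state", "target": {"label_id": "outdoor_lights"}}
assert get_targets_from_trigger_config(trigger, "label_id") == ["outdoor_lights"]
assert get_targets_from_trigger_config(trigger, "area_id") == []
```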
@@ -36,6 +36,10 @@ _LOGGER = logging.getLogger(__name__)
# Cache TTL for backup list (in seconds)
CACHE_TTL = 300

+# Timeout for upload operations (in seconds)
+# This prevents uploads from hanging indefinitely
+UPLOAD_TIMEOUT = 43200  # 12 hours (matches B2 HTTP timeout)
+

def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
    """Return the suggested filenames for the backup and metadata files."""
@@ -329,13 +333,28 @@ class BackblazeBackupAgent(BackupAgent):
        _LOGGER.debug("Uploading backup file %s with streaming", filename)
        try:
            content_type, _ = mimetypes.guess_type(filename)
-            file_version = await self._hass.async_add_executor_job(
-                self._upload_unbound_stream_sync,
-                reader,
-                filename,
-                content_type or "application/x-tar",
-                file_info,
+            file_version = await asyncio.wait_for(
+                self._hass.async_add_executor_job(
+                    self._upload_unbound_stream_sync,
+                    reader,
+                    filename,
+                    content_type or "application/x-tar",
+                    file_info,
+                ),
+                timeout=UPLOAD_TIMEOUT,
            )
+        except TimeoutError:
+            _LOGGER.error(
+                "Upload of %s timed out after %s seconds", filename, UPLOAD_TIMEOUT
+            )
+            reader.abort()
+            raise BackupAgentError(
+                f"Upload timed out after {UPLOAD_TIMEOUT} seconds"
+            ) from None
+        except asyncio.CancelledError:
+            _LOGGER.warning("Upload of %s was cancelled", filename)
+            reader.abort()
+            raise
        finally:
            reader.close()
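The upload change bounds the blocking executor job with `asyncio.wait_for`. A generic sketch of that pattern with illustrative names (`upload_blocking` is a stand-in for the B2 SDK call, not the integration's real helper):

```python
# Generic sketch of bounding a blocking call that runs in the executor with
# asyncio.wait_for. upload_blocking is an illustrative stand-in.
import asyncio

UPLOAD_TIMEOUT = 43200  # 12 hours, the value introduced above


def upload_blocking(data: bytes) -> str:
    return f"uploaded {len(data)} bytes"


async def upload_with_timeout(data: bytes) -> str:
    loop = asyncio.get_running_loop()
    try:
        return await asyncio.wait_for(
            loop.run_in_executor(None, upload_blocking, data),
            timeout=UPLOAD_TIMEOUT,
        )
    except TimeoutError:
        # Note: the worker thread itself keeps running after the timeout,
        # which is why the real code also aborts its stream reader.
        raise RuntimeError("upload timed out") from None
```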
@@ -64,6 +64,7 @@ def _ws_with_blueprint_domain(
    return with_domain_blueprints


+@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/list",
@@ -97,6 +98,7 @@ async def ws_list_blueprints(
    connection.send_result(msg["id"], results)


+@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/import",
@@ -150,6 +152,7 @@ async def ws_import_blueprint(
    )


+@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/save",
@@ -206,6 +209,7 @@ async def ws_save_blueprint(
    )


+@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/delete",
@@ -233,6 +237,7 @@ async def ws_delete_blueprint(
    )


+@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/substitute",

@@ -11,6 +11,7 @@ from homeassistant.const import CONF_HOST, CONF_MAC, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_create_clientsession

+from .const import CONF_USE_SSL
from .coordinator import BraviaTVConfigEntry, BraviaTVCoordinator

PLATFORMS: Final[list[Platform]] = [
@@ -26,11 +27,12 @@ async def async_setup_entry(
    """Set up a config entry."""
    host = config_entry.data[CONF_HOST]
    mac = config_entry.data[CONF_MAC]
+    ssl = config_entry.data.get(CONF_USE_SSL, False)

    session = async_create_clientsession(
        hass, cookie_jar=CookieJar(unsafe=True, quote_cookie=False)
    )
-    client = BraviaClient(host, mac, session=session)
+    client = BraviaClient(host, mac, session=session, ssl=ssl)
    coordinator = BraviaTVCoordinator(
        hass=hass,
        config_entry=config_entry,

@@ -28,6 +28,7 @@ from .const import (
    ATTR_MODEL,
    CONF_NICKNAME,
    CONF_USE_PSK,
+    CONF_USE_SSL,
    DOMAIN,
    NICKNAME_PREFIX,
)
@@ -46,11 +47,12 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
    def create_client(self) -> None:
        """Create Bravia TV client from config."""
        host = self.device_config[CONF_HOST]
+        ssl = self.device_config[CONF_USE_SSL]
        session = async_create_clientsession(
            self.hass,
            cookie_jar=CookieJar(unsafe=True, quote_cookie=False),
        )
-        self.client = BraviaClient(host=host, session=session)
+        self.client = BraviaClient(host=host, session=session, ssl=ssl)

    async def gen_instance_ids(self) -> tuple[str, str]:
        """Generate client_id and nickname."""
@@ -123,10 +125,10 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle authorize step."""
-        self.create_client()
-
        if user_input is not None:
            self.device_config[CONF_USE_PSK] = user_input[CONF_USE_PSK]
+            self.device_config[CONF_USE_SSL] = user_input[CONF_USE_SSL]
+            self.create_client()
            if user_input[CONF_USE_PSK]:
                return await self.async_step_psk()
            return await self.async_step_pin()
@@ -136,6 +138,7 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_USE_PSK, default=False): bool,
+                    vol.Required(CONF_USE_SSL, default=False): bool,
                }
            ),
        )

@@ -12,6 +12,7 @@ ATTR_MODEL: Final = "model"

CONF_NICKNAME: Final = "nickname"
CONF_USE_PSK: Final = "use_psk"
+CONF_USE_SSL: Final = "use_ssl"

DOMAIN: Final = "braviatv"
LEGACY_CLIENT_ID: Final = "HomeAssistant"

@@ -7,7 +7,7 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["pybravia"],
-  "requirements": ["pybravia==0.3.4"],
+  "requirements": ["pybravia==0.4.1"],
  "ssdp": [
    {
      "manufacturer": "Sony Corporation",

@@ -15,9 +15,10 @@
    "step": {
      "authorize": {
        "data": {
-          "use_psk": "Use PSK authentication"
+          "use_psk": "Use PSK authentication",
+          "use_ssl": "Use SSL connection"
        },
-        "description": "Make sure that «Control remotely» is enabled on your TV, go to: \nSettings -> Network -> Remote device settings -> Control remotely. \n\nThere are two authorization methods: PIN code or PSK (Pre-Shared Key). \nAuthorization via PSK is recommended as more stable.",
+        "description": "Make sure that «Control remotely» is enabled on your TV. Go to: \nSettings -> Network -> Remote device settings -> Control remotely. \n\nThere are two authorization methods: PIN code or PSK (Pre-Shared Key). \nAuthorization via PSK is recommended, as it is more stable. \n\nUse an SSL connection only if your TV supports this connection type.",
        "title": "Authorize Sony Bravia TV"
      },
      "confirm": {

@@ -20,5 +20,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/bthome",
  "iot_class": "local_push",
-  "requirements": ["bthome-ble==3.17.0"]
+  "requirements": ["bthome-ble==3.16.0"]
}

@@ -33,7 +33,7 @@ HVAC_MODE_CHANGED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
    {
        vol.Required(CONF_OPTIONS): {
            vol.Required(CONF_HVAC_MODE): vol.All(
-                cv.ensure_list, vol.Length(min=1), [HVACMode]
+                cv.ensure_list, vol.Length(min=1), [vol.Coerce(HVACMode)]
            ),
        },
    }

@@ -19,6 +19,10 @@
      selector:
        choose:
          choices:
+            number:
+              selector:
+                number:
+                  mode: box
            entity:
              selector:
                entity:
@@ -27,14 +31,11 @@
                    - input_number
                    - number
                    - sensor
-            number:
-              selector:
-                number:
-                  mode: box
      translation_key: number_or_entity

.trigger_threshold_type: &trigger_threshold_type
  required: true
  default: above
  selector:
    select:
      options:

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "entity",
  "quality_scale": "internal",
-  "requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.1"]
+  "requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.6"]
}

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.11", "deebot-client==17.0.0"]
+  "requirements": ["py-sucks==0.9.11", "deebot-client==17.0.1"]
}
@@ -4,6 +4,7 @@ from __future__ import annotations

import dataclasses
from datetime import datetime
+import logging
from typing import Final

from aioecowitt import EcoWittSensor, EcoWittSensorTypes
@@ -39,6 +40,9 @@ from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM
from . import EcowittConfigEntry
from .entity import EcowittEntity

+_LOGGER = logging.getLogger(__name__)
+

_METRIC: Final = (
    EcoWittSensorTypes.TEMPERATURE_C,
    EcoWittSensorTypes.RAIN_COUNT_MM,
@@ -57,6 +61,40 @@ _IMPERIAL: Final = (
)


+_RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING: Final = {
+    "eventrainin": SensorStateClass.TOTAL_INCREASING,
+    "hourlyrainin": None,
+    "totalrainin": SensorStateClass.TOTAL_INCREASING,
+    "dailyrainin": SensorStateClass.TOTAL_INCREASING,
+    "weeklyrainin": SensorStateClass.TOTAL_INCREASING,
+    "monthlyrainin": SensorStateClass.TOTAL_INCREASING,
+    "yearlyrainin": SensorStateClass.TOTAL_INCREASING,
+    "last24hrainin": None,
+    "eventrainmm": SensorStateClass.TOTAL_INCREASING,
+    "hourlyrainmm": None,
+    "totalrainmm": SensorStateClass.TOTAL_INCREASING,
+    "dailyrainmm": SensorStateClass.TOTAL_INCREASING,
+    "weeklyrainmm": SensorStateClass.TOTAL_INCREASING,
+    "monthlyrainmm": SensorStateClass.TOTAL_INCREASING,
+    "yearlyrainmm": SensorStateClass.TOTAL_INCREASING,
+    "last24hrainmm": None,
+    "erain_piezo": SensorStateClass.TOTAL_INCREASING,
+    "hrain_piezo": None,
+    "drain_piezo": SensorStateClass.TOTAL_INCREASING,
+    "wrain_piezo": SensorStateClass.TOTAL_INCREASING,
+    "mrain_piezo": SensorStateClass.TOTAL_INCREASING,
+    "yrain_piezo": SensorStateClass.TOTAL_INCREASING,
+    "last24hrain_piezo": None,
+    "erain_piezomm": SensorStateClass.TOTAL_INCREASING,
+    "hrain_piezomm": None,
+    "drain_piezomm": SensorStateClass.TOTAL_INCREASING,
+    "wrain_piezomm": SensorStateClass.TOTAL_INCREASING,
+    "mrain_piezomm": SensorStateClass.TOTAL_INCREASING,
+    "yrain_piezomm": SensorStateClass.TOTAL_INCREASING,
+    "last24hrain_piezomm": None,
+}
+

ECOWITT_SENSORS_MAPPING: Final = {
    EcoWittSensorTypes.HUMIDITY: SensorEntityDescription(
        key="HUMIDITY",
@@ -285,15 +323,15 @@ async def async_setup_entry(
            name=sensor.name,
        )

-        # Only total rain needs state class for long-term statistics
-        if sensor.key in (
-            "totalrainin",
-            "totalrainmm",
+        if sensor.stype in (
+            EcoWittSensorTypes.RAIN_COUNT_INCHES,
+            EcoWittSensorTypes.RAIN_COUNT_MM,
        ):
-            description = dataclasses.replace(
-                description,
-                state_class=SensorStateClass.TOTAL_INCREASING,
-            )
+            if sensor.key not in _RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING:
+                _LOGGER.warning("Unknown rain count sensor: %s", sensor.key)
+                return
+            state_class = _RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING[sensor.key]
+            description = dataclasses.replace(description, state_class=state_class)

        async_add_entities([EcowittSensorEntity(sensor, description)])
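The rain-sensor change relies on `dataclasses.replace` to copy a frozen entity description with a different `state_class`. A self-contained sketch of that lookup-and-replace step, using a simplified description class and string state classes as stand-ins for the Home Assistant types:

```python
# Sketch of the per-key state_class override used above. SensorDescription and
# the string state classes are simplified stand-ins.
import dataclasses


@dataclasses.dataclass(frozen=True)
class SensorDescription:
    key: str
    state_class: str | None = None


STATE_CLASS_OVERRIDES: dict[str, str | None] = {
    "totalrainin": "total_increasing",
    "hourlyrainin": None,  # rolling hourly value: no long-term statistics
}


def apply_override(description: SensorDescription) -> SensorDescription:
    if description.key not in STATE_CLASS_OVERRIDES:
        return description
    # dataclasses.replace returns a new frozen instance with the field swapped.
    return dataclasses.replace(
        description, state_class=STATE_CLASS_OVERRIDES[description.key]
    )


assert apply_override(SensorDescription("totalrainin")).state_class == "total_increasing"
assert apply_override(SensorDescription("hourlyrainin")).state_class is None
```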
@@ -8,7 +8,7 @@
  "iot_class": "local_polling",
  "loggers": ["eheimdigital"],
  "quality_scale": "platinum",
-  "requirements": ["eheimdigital==1.4.0"],
+  "requirements": ["eheimdigital==1.5.0"],
  "zeroconf": [
    { "name": "eheimdigital._http._tcp.local.", "type": "_http._tcp.local." }
  ]

@@ -8,7 +8,7 @@
  "iot_class": "local_polling",
  "loggers": ["pyenphase"],
  "quality_scale": "platinum",
-  "requirements": ["pyenphase==2.4.2"],
+  "requirements": ["pyenphase==2.4.3"],
  "zeroconf": [
    {
      "type": "_enphase-envoy._tcp.local."

@@ -783,7 +783,7 @@ ENCHARGE_AGGREGATE_SENSORS = (
        translation_key="available_energy",
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        state_class=SensorStateClass.MEASUREMENT,
-        device_class=SensorDeviceClass.ENERGY,
+        device_class=SensorDeviceClass.ENERGY_STORAGE,
        value_fn=attrgetter("available_energy"),
    ),
    EnvoyEnchargeAggregateSensorEntityDescription(
@@ -791,14 +791,14 @@ ENCHARGE_AGGREGATE_SENSORS = (
        translation_key="reserve_energy",
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        state_class=SensorStateClass.MEASUREMENT,
-        device_class=SensorDeviceClass.ENERGY,
+        device_class=SensorDeviceClass.ENERGY_STORAGE,
        value_fn=attrgetter("backup_reserve"),
    ),
    EnvoyEnchargeAggregateSensorEntityDescription(
        key="max_capacity",
        translation_key="max_capacity",
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
-        device_class=SensorDeviceClass.ENERGY,
+        device_class=SensorDeviceClass.ENERGY_STORAGE,
        value_fn=attrgetter("max_available_capacity"),
    ),
)

@@ -7,7 +7,7 @@ from enum import StrEnum
from typing import Final

DOMAIN: Final = "essent"
-UPDATE_INTERVAL: Final = timedelta(hours=12)
+UPDATE_INTERVAL: Final = timedelta(hours=1)
ATTRIBUTION: Final = "Data provided by Essent"

@@ -461,7 +461,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = (
        key="sleep/timeInBed",
        translation_key="sleep_time_in_bed",
        native_unit_of_measurement=UnitOfTime.MINUTES,
-        icon="mdi:hotel",
+        icon="mdi:bed",
        device_class=SensorDeviceClass.DURATION,
        scope=FitbitScope.SLEEP,
        state_class=SensorStateClass.TOTAL_INCREASING,

@@ -31,7 +31,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
)
STEP_SMS_CODE_DATA_SCHEMA = vol.Schema(
    {
-        vol.Required(CONF_SMS_CODE): int,
+        vol.Required(CONF_SMS_CODE): str,
    }
)
@@ -75,7 +75,7 @@ class FressnapfTrackerConfigFlow(ConfigFlow, domain=DOMAIN):
        return errors, False

    async def _async_verify_sms_code(
-        self, sms_code: int
+        self, sms_code: str
    ) -> tuple[dict[str, str], str | None]:
        """Verify SMS code and return errors and access_token."""
        errors: dict[str, str] = {}

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
-  "requirements": ["fressnapftracker==0.2.0"]
+  "requirements": ["fressnapftracker==0.2.1"]
}

@@ -77,9 +77,14 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
        )
        LOGGER.debug("enable smarthome templates: %s", self.has_templates)

-        self.has_triggers = await self.hass.async_add_executor_job(
-            self.fritz.has_triggers
-        )
+        try:
+            self.has_triggers = await self.hass.async_add_executor_job(
+                self.fritz.has_triggers
+            )
+        except HTTPError:
+            # Fritz!OS < 7.39 just don't have this api endpoint
+            # so we need to fetch the HTTPError here and assume no triggers
+            self.has_triggers = False
        LOGGER.debug("enable smarthome triggers: %s", self.has_triggers)

        self.configuration_url = self.fritz.get_prefixed_host()

@@ -23,5 +23,5 @@
    "winter_mode": {}
  },
  "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20251229.0"]
+  "requirements": ["home-assistant-frontend==20260107.2"]
}

@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["google_air_quality_api"],
  "quality_scale": "bronze",
-  "requirements": ["google_air_quality_api==2.0.2"]
+  "requirements": ["google_air_quality_api==2.1.2"]
}

@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/gree",
  "iot_class": "local_polling",
  "loggers": ["greeclimate"],
-  "requirements": ["greeclimate==2.1.0"]
+  "requirements": ["greeclimate==2.1.1"]
}

@@ -5,6 +5,7 @@ from __future__ import annotations
from dataclasses import dataclass
import logging

+from pyhik.constants import SENSOR_MAP
from pyhik.hikvision import HikCamera
import requests

@@ -70,13 +71,33 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
        device_type=device_type,
    )

+    _LOGGER.debug(
+        "Device %s (type=%s) initial event_states: %s",
+        device_name,
+        device_type,
+        camera.current_event_states,
+    )
+
    # For NVRs or devices with no detected events, try to fetch events from ISAPI
+    # Use broader notification methods for NVRs since they often use 'record' etc.
    if device_type == "NVR" or not camera.current_event_states:
+        nvr_notification_methods = {"center", "HTTP", "record", "email", "beep"}
+
        def fetch_and_inject_nvr_events() -> None:
            """Fetch and inject NVR events in a single executor job."""
-            if nvr_events := camera.get_event_triggers():
-                camera.inject_events(nvr_events)
+            nvr_events = camera.get_event_triggers(nvr_notification_methods)
+            _LOGGER.debug("NVR events fetched with extended methods: %s", nvr_events)
+            if nvr_events:
+                # Map raw event type names to friendly names using SENSOR_MAP
+                mapped_events: dict[str, list[int]] = {}
+                for event_type, channels in nvr_events.items():
+                    friendly_name = SENSOR_MAP.get(event_type.lower(), event_type)
+                    if friendly_name in mapped_events:
+                        mapped_events[friendly_name].extend(channels)
+                    else:
+                        mapped_events[friendly_name] = list(channels)
+                _LOGGER.debug("Mapped NVR events: %s", mapped_events)
+                camera.inject_events(mapped_events)

        await hass.async_add_executor_job(fetch_and_inject_nvr_events)

@@ -8,5 +8,5 @@
  "iot_class": "local_push",
  "loggers": ["pyhik"],
  "quality_scale": "legacy",
-  "requirements": ["pyHik==0.3.4"]
+  "requirements": ["pyHik==0.4.0"]
}

@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
  "integration_type": "system",
  "requirements": [
-    "serialx==0.5.0",
+    "serialx==0.6.2",
    "universal-silabs-flasher==0.1.2",
    "ha-silabs-firmware-client==0.3.0"
  ]

@@ -13,6 +13,6 @@
  "iot_class": "local_polling",
  "loggers": ["homewizard_energy"],
  "quality_scale": "platinum",
-  "requirements": ["python-homewizard-energy==10.0.0"],
+  "requirements": ["python-homewizard-energy==10.0.1"],
  "zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
}

@@ -4,7 +4,8 @@
  "bluetooth": [
    {
      "connectable": true,
-      "service_data_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb"
+      "service_data_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb",
+      "service_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb"
    }
  ],
  "codeowners": ["@flip-dots"],

@@ -19,6 +19,10 @@
      selector:
        choose:
          choices:
+            number:
+              selector:
+                number:
+                  mode: box
            entity:
              selector:
                entity:
@@ -27,14 +31,11 @@
                    - input_number
                    - number
                    - sensor
-            number:
-              selector:
-                number:
-                  mode: box
      translation_key: number_or_entity

.trigger_threshold_type: &trigger_threshold_type
  required: true
  default: above
  selector:
    select:
      options:

@@ -12,5 +12,5 @@
  "iot_class": "local_polling",
  "loggers": ["incomfortclient"],
  "quality_scale": "platinum",
-  "requirements": ["incomfort-client==0.6.10"]
+  "requirements": ["incomfort-client==0.6.11"]
}

@@ -116,6 +116,8 @@ class IsraelRailEntitySensor(
    @property
    def native_value(self) -> StateType | datetime:
        """Return the state of the sensor."""
+        if self.entity_description.index >= len(self.coordinator.data):
+            return None
        return self.entity_description.value_fn(
            self.coordinator.data[self.entity_description.index]
        )

@@ -13,7 +13,7 @@
  "requirements": [
    "xknx==3.13.0",
    "xknxproject==3.8.2",
-    "knx-frontend==2025.12.30.151231"
+    "knx-frontend==2026.1.15.112308"
  ],
  "single_config_entry": true
}

@@ -256,6 +256,8 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
        supported_fn=(
            lambda coordinator: coordinator.device.dashboard.model_name
            in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
+            and WidgetType.CM_BREW_BY_WEIGHT_DOSES
+            in coordinator.device.dashboard.config
        ),
    ),
    LaMarzoccoNumberEntityDescription(
@@ -289,6 +291,8 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
        supported_fn=(
            lambda coordinator: coordinator.device.dashboard.model_name
            in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
+            and WidgetType.CM_BREW_BY_WEIGHT_DOSES
+            in coordinator.device.dashboard.config
        ),
    ),
)

@@ -149,6 +149,8 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = (
        supported_fn=(
            lambda coordinator: coordinator.device.dashboard.model_name
            in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
+            and WidgetType.CM_BREW_BY_WEIGHT_DOSES
+            in coordinator.device.dashboard.config
        ),
    ),
)

@@ -19,6 +19,10 @@
      selector:
        choose:
          choices:
+            number:
+              selector:
+                number:
+                  mode: box
            entity:
              selector:
                entity:
@@ -27,10 +31,6 @@
                    - input_number
                    - number
                    - sensor
-            number:
-              selector:
-                number:
-                  mode: box
      translation_key: number_or_entity

turned_on: *trigger_common
@@ -48,6 +48,7 @@ brightness_crossed_threshold:
    behavior: *trigger_behavior
    threshold_type:
      required: true
      default: above
      selector:
        select:
          options:

@@ -154,6 +154,7 @@ SUPPORT_FAN_MODE_DEVICES: set[tuple[int, int]] = {
    (0x1209, 0x8027),
    (0x1209, 0x8028),
    (0x1209, 0x8029),
+    (0x131A, 0x1000),
}

SystemModeEnum = clusters.Thermostat.Enums.SystemModeEnum

@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "local_polling",
  "quality_scale": "platinum",
-  "requirements": ["aiomealie==1.1.1"]
+  "requirements": ["aiomealie==1.2.0"]
}

@@ -16,5 +16,5 @@
  "iot_class": "local_push",
  "loggers": ["nacl"],
  "quality_scale": "internal",
-  "requirements": ["PyNaCl==1.6.0"]
+  "requirements": ["PyNaCl==1.6.2"]
}

@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["pynintendoauth", "pynintendoparental"],
  "quality_scale": "bronze",
-  "requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.3.0"]
+  "requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.3.2"]
}
@@ -112,45 +112,49 @@ class OpenAIConfigFlow(ConfigFlow, domain=DOMAIN):
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
-        if user_input is None:
-            return self.async_show_form(
-                step_id="user", data_schema=STEP_USER_DATA_SCHEMA
-            )
-
        errors: dict[str, str] = {}

-        self._async_abort_entries_match(user_input)
-        try:
-            await validate_input(self.hass, user_input)
-        except openai.APIConnectionError:
-            errors["base"] = "cannot_connect"
-        except openai.AuthenticationError:
-            errors["base"] = "invalid_auth"
-        except Exception:
-            _LOGGER.exception("Unexpected exception")
-            errors["base"] = "unknown"
-        else:
-            return self.async_create_entry(
-                title="ChatGPT",
-                data=user_input,
-                subentries=[
-                    {
-                        "subentry_type": "conversation",
-                        "data": RECOMMENDED_CONVERSATION_OPTIONS,
-                        "title": DEFAULT_CONVERSATION_NAME,
-                        "unique_id": None,
-                    },
-                    {
-                        "subentry_type": "ai_task_data",
-                        "data": RECOMMENDED_AI_TASK_OPTIONS,
-                        "title": DEFAULT_AI_TASK_NAME,
-                        "unique_id": None,
-                    },
-                ],
-            )
+        if user_input is not None:
+            self._async_abort_entries_match(user_input)
+            try:
+                await validate_input(self.hass, user_input)
+            except openai.APIConnectionError:
+                errors["base"] = "cannot_connect"
+            except openai.AuthenticationError:
+                errors["base"] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                return self.async_create_entry(
+                    title="ChatGPT",
+                    data=user_input,
+                    subentries=[
+                        {
+                            "subentry_type": "conversation",
+                            "data": RECOMMENDED_CONVERSATION_OPTIONS,
+                            "title": DEFAULT_CONVERSATION_NAME,
+                            "unique_id": None,
+                        },
+                        {
+                            "subentry_type": "ai_task_data",
+                            "data": RECOMMENDED_AI_TASK_OPTIONS,
+                            "title": DEFAULT_AI_TASK_NAME,
+                            "unique_id": None,
+                        },
+                    ],
+                )

        return self.async_show_form(
-            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
+            step_id="user",
+            data_schema=self.add_suggested_values_to_schema(
+                STEP_USER_DATA_SCHEMA, user_input
+            ),
+            errors=errors,
+            description_placeholders={
+                "instructions_url": "https://www.home-assistant.io/integrations/openai_conversation/#generate-an-api-key",
+            },
        )

    @classmethod
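The flow now re-renders the form with the user's previous answers instead of returning early. A condensed skeleton of that single-handler shape; `validate_input`, `CannotConnect`, and `DATA_SCHEMA` are placeholders, while `add_suggested_values_to_schema` and `async_show_form` are the real `ConfigFlow` helpers the change relies on:

```python
# Condensed skeleton of the config-flow shape used above. validate_input,
# CannotConnect and DATA_SCHEMA are placeholders, not real APIs.
async def async_step_user(self, user_input=None):
    errors: dict[str, str] = {}

    if user_input is not None:
        try:
            await validate_input(self.hass, user_input)
        except CannotConnect:
            errors["base"] = "cannot_connect"
        else:
            return self.async_create_entry(title="Example", data=user_input)

    # On first display user_input is None; on errors it holds the previous
    # answers, which are shown again as suggested values.
    return self.async_show_form(
        step_id="user",
        data_schema=self.add_suggested_values_to_schema(DATA_SCHEMA, user_input),
        errors=errors,
    )
```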
@@ -12,7 +12,11 @@
      "user": {
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]"
-        }
+        },
+        "data_description": {
+          "api_key": "Your OpenAI API key."
+        },
+        "description": "Set up OpenAI Conversation integration by providing your OpenAI API key. Instructions to obtain an API key can be found [here]({instructions_url})."
      }
    }
  },

@@ -9,5 +9,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["opower"],
  "quality_scale": "bronze",
-  "requirements": ["opower==0.15.9"]
+  "requirements": ["opower==0.16.3"]
}

@@ -55,7 +55,7 @@ SENSOR_TYPES: dict[str, OSOEnergySensorEntityDescription] = {
        key="optimization_mode",
        translation_key="optimization_mode",
        device_class=SensorDeviceClass.ENUM,
-        options=["off", "oso", "gridcompany", "smartcompany", "advanced"],
+        options=["off", "oso", "gridcompany", "smartcompany", "advanced", "nettleie"],
        value_fn=lambda entity_data: entity_data.state.lower(),
    ),
    "power_load": OSOEnergySensorEntityDescription(

@@ -58,6 +58,7 @@
      "state": {
        "advanced": "Advanced",
        "gridcompany": "Grid company",
+        "nettleie": "Nettleie",
        "off": "[%key:common::state::off%]",
        "oso": "OSO",
        "smartcompany": "Smart company"

@@ -8,5 +8,5 @@
  "documentation": "https://www.home-assistant.io/integrations/otbr",
  "integration_type": "service",
  "iot_class": "local_polling",
-  "requirements": ["python-otbr-api==2.7.0"]
+  "requirements": ["python-otbr-api==2.7.1"]
}

@@ -13,7 +13,7 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"],
-  "requirements": ["pyoverkiz==1.19.3"],
+  "requirements": ["pyoverkiz==1.19.4"],
  "zeroconf": [
    {
      "name": "gateway*",

@@ -9,6 +9,6 @@
  "integration_type": "service",
  "iot_class": "local_push",
  "loggers": ["nacl"],
-  "requirements": ["PyNaCl==1.6.0"],
+  "requirements": ["PyNaCl==1.6.2"],
  "single_config_entry": true
}

@@ -128,8 +128,9 @@ class RingCam(RingEntity[RingDoorBell], Camera):
        self._device = self._get_coordinator_data().get_video_device(
            self._device.device_api_id
        )

        history_data = self._device.last_history
-        if history_data:
+        if history_data and self._device.has_subscription:
            self._last_event = history_data[0]
            # will call async_update to update the attributes and get the
            # video url from the api
@@ -154,8 +155,16 @@ class RingCam(RingEntity[RingDoorBell], Camera):
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        """Return a still image response from the camera."""
+        if self._video_url is None:
+            if not self._device.has_subscription:
+                raise HomeAssistantError(
+                    translation_domain=DOMAIN,
+                    translation_key="no_subscription",
+                )
+            return None
+
        key = (width, height)
-        if not (image := self._images.get(key)) and self._video_url is not None:
+        if not (image := self._images.get(key)):
            image = await ffmpeg.async_get_image(
                self.hass,
                self._video_url,

@@ -151,6 +151,9 @@
    "api_timeout": {
      "message": "Timeout communicating with Ring API"
    },
+    "no_subscription": {
+      "message": "Ring Protect subscription required for snapshots"
+    },
    "sdp_m_line_index_required": {
      "message": "Error negotiating stream for {device}"
    }

@@ -20,7 +20,7 @@
  "loggers": ["roborock"],
  "quality_scale": "silver",
  "requirements": [
-    "python-roborock==4.2.0",
+    "python-roborock==4.2.1",
    "vacuum-map-parser-roborock==0.1.4"
  ]
}

@@ -391,15 +391,6 @@ Q7_B01_SENSOR_DESCRIPTIONS = [
        translation_key="mop_life_time_left",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
-    RoborockSensorDescriptionB01(
-        key="total_cleaning_time",
-        value_fn=lambda data: data.real_clean_time,
-        device_class=SensorDeviceClass.DURATION,
-        native_unit_of_measurement=UnitOfTime.MINUTES,
-        suggested_unit_of_measurement=UnitOfTime.HOURS,
-        translation_key="total_cleaning_time",
-        entity_category=EntityCategory.DIAGNOSTIC,
-    ),
]

@@ -16,5 +16,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["pysma"],
-  "requirements": ["pysma==1.0.2"]
+  "requirements": ["pysma==1.1.0"]
}

@@ -46,7 +46,7 @@ SENSOR_TYPES = [
        key="lifetime_energy",
        json_key="lifeTimeData",
        translation_key="lifetime_energy",
-        state_class=SensorStateClass.TOTAL,
+        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
    ),
@@ -55,6 +55,7 @@ SENSOR_TYPES = [
        json_key="lastYearData",
        translation_key="energy_this_year",
        entity_registry_enabled_default=False,
+        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
    ),
@@ -63,6 +64,7 @@ SENSOR_TYPES = [
        json_key="lastMonthData",
        translation_key="energy_this_month",
        entity_registry_enabled_default=False,
+        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
    ),
@@ -71,6 +73,7 @@ SENSOR_TYPES = [
        json_key="lastDayData",
        translation_key="energy_today",
        entity_registry_enabled_default=False,
+        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        device_class=SensorDeviceClass.ENERGY,
    ),
@@ -123,24 +126,32 @@ SENSOR_TYPES = [
        json_key="LOAD",
        translation_key="power_consumption",
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.POWER,
    ),
    SolarEdgeSensorEntityDescription(
        key="solar_power",
        json_key="PV",
        translation_key="solar_power",
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.POWER,
    ),
    SolarEdgeSensorEntityDescription(
        key="grid_power",
        json_key="GRID",
        translation_key="grid_power",
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.POWER,
    ),
    SolarEdgeSensorEntityDescription(
        key="storage_power",
        json_key="STORAGE",
        translation_key="storage_power",
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.POWER,
    ),
    SolarEdgeSensorEntityDescription(
        key="purchased_energy",
@@ -194,6 +205,7 @@ SENSOR_TYPES = [
        entity_registry_enabled_default=False,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
+        device_class=SensorDeviceClass.BATTERY,
    ),
]

@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["solarlog_cli"],
  "quality_scale": "platinum",
-  "requirements": ["solarlog_cli==0.6.1"]
+  "requirements": ["solarlog_cli==0.7.0"]
}

@@ -11,5 +11,5 @@
  "documentation": "https://www.home-assistant.io/integrations/sunricher_dali",
  "iot_class": "local_push",
  "quality_scale": "silver",
-  "requirements": ["PySrDaliGateway==0.18.0"]
+  "requirements": ["PySrDaliGateway==0.19.3"]
}

@@ -15,5 +15,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["PyTado"],
-  "requirements": ["python-tado==0.18.15"]
+  "requirements": ["python-tado==0.18.16"]
}

@@ -80,10 +80,6 @@ class TelegramNotificationService(BaseNotificationService):
    def send_message(self, message="", **kwargs):
        """Send a message to a user."""
        service_data = {ATTR_TARGET: kwargs.get(ATTR_TARGET, self._chat_id)}
-        if ATTR_TITLE in kwargs:
-            service_data.update({ATTR_TITLE: kwargs.get(ATTR_TITLE)})
-        if message:
-            service_data.update({ATTR_MESSAGE: message})
        data = kwargs.get(ATTR_DATA)

        # Set message tag
@@ -161,6 +157,12 @@ class TelegramNotificationService(BaseNotificationService):
            )

        # Send message
+
+        if ATTR_TITLE in kwargs:
+            service_data.update({ATTR_TITLE: kwargs.get(ATTR_TITLE)})
+        if message:
+            service_data.update({ATTR_MESSAGE: message})
+
        _LOGGER.debug(
            "TELEGRAM NOTIFIER calling %s.send_message with %s",
            TELEGRAM_BOT_DOMAIN,

@@ -79,6 +79,7 @@ class OAuth2FlowHandler(

        session = async_get_clientsession(self.hass)
        self.api = TeslaFleetApi(
+            access_token="",
            session=session,
            server=server,
            partner_scope=True,

@@ -5,6 +5,7 @@ from collections.abc import Callable
from typing import Final

from aiohttp import ClientResponseError
+from aiohttp.client_exceptions import ClientError
from tesla_fleet_api.const import Scope
from tesla_fleet_api.exceptions import (
    Forbidden,
@@ -315,7 +316,7 @@ async def async_migrate_entry(
        data = await Teslemetry(session, access_token).migrate_to_oauth(
            CLIENT_ID, access_token, hass.config.location_name
        )
-    except ClientResponseError as e:
+    except (ClientError, TypeError) as e:
        raise ConfigEntryAuthFailed from e

    # Add auth_implementation for OAuth2 flow compatibility

@@ -291,9 +291,7 @@ class TeslemetryStreamingClimateEntity(
            )
        )
        self.async_on_remove(
-            self.vehicle.stream_vehicle.listen_HvacACEnabled(
-                self._async_handle_hvac_ac_enabled
-            )
+            self.vehicle.stream_vehicle.listen_HvacPower(self._async_handle_hvac_power)
        )
        self.async_on_remove(
            self.vehicle.stream_vehicle.listen_ClimateKeeperMode(
@@ -335,9 +333,13 @@ class TeslemetryStreamingClimateEntity(
        self._attr_current_temperature = data
        self.async_write_ha_state()

-    def _async_handle_hvac_ac_enabled(self, data: bool | None):
+    def _async_handle_hvac_power(self, data: str | None):
        self._attr_hvac_mode = (
-            None if data is None else HVACMode.HEAT_COOL if data else HVACMode.OFF
+            None
+            if data is None
+            else HVACMode.HEAT_COOL
+            if data == "On"
+            else HVACMode.OFF
        )
        self.async_write_ha_state()

@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/thread",
  "integration_type": "service",
  "iot_class": "local_polling",
-  "requirements": ["python-otbr-api==2.7.0", "pyroute2==0.7.5"],
+  "requirements": ["python-otbr-api==2.7.1", "pyroute2==0.7.5"],
  "single_config_entry": true,
  "zeroconf": ["_meshcop._udp.local."]
}
@@ -250,6 +250,12 @@ class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
    async def _async_update_data(self) -> dict[str, TibberDevice]:
        """Fetch the latest device capabilities from the Tibber Data API."""
        client = await self._async_get_client()
-        devices: dict[str, TibberDevice] = await client.update_devices()
+        try:
+            devices: dict[str, TibberDevice] = await client.update_devices()
+        except tibber.exceptions.RateLimitExceededError as err:
+            raise UpdateFailed(
+                f"Rate limit exceeded, retry after {err.retry_after} seconds",
+                retry_after=err.retry_after,
+            ) from err
        self._build_sensor_lookup(devices)
        return devices
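The coordinator change converts the client's rate-limit error into `UpdateFailed` so the coordinator backs off. A short sketch of that conversion, with a stand-in exception class in place of `tibber.exceptions.RateLimitExceededError`; the `retry_after` keyword mirrors the usage in the change above:

```python
# Sketch of converting a client rate-limit error into UpdateFailed.
# RateLimitError is a stand-in exception, not the Tibber library's class.
from homeassistant.helpers.update_coordinator import UpdateFailed


class RateLimitError(Exception):
    def __init__(self, retry_after: int) -> None:
        super().__init__("rate limited")
        self.retry_after = retry_after


async def fetch_devices(client) -> dict:
    try:
        return await client.update_devices()
    except RateLimitError as err:
        raise UpdateFailed(
            f"Rate limit exceeded, retry after {err.retry_after} seconds",
            retry_after=err.retry_after,
        ) from err
```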
@@ -7,5 +7,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/tibber",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["tibber"],
|
||||
"requirements": ["pyTibber==0.33.1"]
|
||||
"requirements": ["pyTibber==0.34.4"]
|
||||
}
|
||||
|
||||
@@ -281,7 +281,7 @@ DATA_API_SENSORS: tuple[SensorEntityDescription, ...] = (
|
||||
key="range.remaining",
|
||||
translation_key="range_remaining",
|
||||
device_class=SensorDeviceClass.DISTANCE,
|
||||
native_unit_of_measurement=UnitOfLength.KILOMETERS,
|
||||
native_unit_of_measurement=UnitOfLength.METERS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
|
||||
@@ -338,8 +338,8 @@ class TractiveClient:
|
||||
# Handle both structures for compatibility
|
||||
data = event.get("content", event)
|
||||
|
||||
activity = data.get("activity", {})
|
||||
sleep = data.get("sleep", {})
|
||||
activity = data.get("activity") or {}
|
||||
sleep = data.get("sleep") or {}
|
||||
|
||||
payload = {
|
||||
ATTR_DAILY_GOAL: activity.get("minutesGoal"),
|
||||
|
||||
@@ -41,7 +41,7 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["uiprotect", "unifi_discovery"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["uiprotect==7.33.3", "unifi-discovery==1.2.0"],
|
||||
"requirements": ["uiprotect==8.0.0", "unifi-discovery==1.2.0"],
|
||||
"ssdp": [
|
||||
{
|
||||
"manufacturer": "Ubiquiti Networks",
|
||||
|
||||
@@ -74,6 +74,8 @@ class VeluxRainSensor(VeluxEntity, BinarySensorEntity):

        self._attr_available = True

        # Velux windows with rain sensors report an opening limitation of 93 or 100 (Velux GPU) when rain is detected.
        # So far, only 93 and 100 have been observed in practice, documentation on this is non-existent AFAIK.
        self._attr_is_on = limitation.min_value in {93, 100}
        # Velux windows with rain sensors report an opening limitation when rain is detected.
        # So far we've seen 89, 91, 93 (most cases) or 100 (Velux GPU). It probably makes sense to
        # assume that any large enough limitation (we use >=89) means rain is detected.
        # Documentation on this is non-existent AFAIK.
        self._attr_is_on = limitation.min_value >= 89

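A short illustration of the behavioural change above; the limitation values 89, 91, 93 and 100 are the ones quoted in the comments, not an exhaustive list.

def rain_detected_old(min_value: int) -> bool:
    # Previous logic: only the two exactly observed values counted as rain.
    return min_value in {93, 100}


def rain_detected_new(min_value: int) -> bool:
    # New logic: any sufficiently large limitation is treated as rain.
    return min_value >= 89


# 89 and 91 were previously missed; 93 and 100 were already detected.
for value in (0, 89, 91, 93, 100):
    print(value, rain_detected_old(value), rain_detected_new(value))
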
@@ -3,7 +3,7 @@
|
||||
"name": "Watts Vision +",
|
||||
"codeowners": ["@theobld-ww", "@devender-verma-ww", "@ssi-spyro"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials"],
|
||||
"dependencies": ["application_credentials", "cloud"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/watts",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
|
||||
@@ -9,6 +9,9 @@
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"segment_speed": {
|
||||
"default": "mdi:speedometer"
|
||||
},
|
||||
"speed": {
|
||||
"default": "mdi:speedometer"
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@
|
||||
"universal_silabs_flasher",
|
||||
"serialx"
|
||||
],
|
||||
"requirements": ["zha==0.0.82", "serialx==0.5.0"],
|
||||
"requirements": ["zha==0.0.84", "serialx==0.6.2"],
|
||||
"usb": [
|
||||
{
|
||||
"description": "*2652*",
|
||||
|
||||
@@ -840,19 +840,26 @@ class NodeEvents:
|
||||
# After ensuring the node is set up in HA, we should check if the node's
|
||||
# device config has changed, and if so, issue a repair registry entry for a
|
||||
# possible reinterview
|
||||
if not node.is_controller_node and await node.async_has_device_config_changed():
|
||||
device_name = device.name_by_user or device.name or "Unnamed device"
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"device_config_file_changed.{device.id}",
|
||||
data={"device_id": device.id, "device_name": device_name},
|
||||
is_fixable=True,
|
||||
is_persistent=False,
|
||||
translation_key="device_config_file_changed",
|
||||
translation_placeholders={"device_name": device_name},
|
||||
severity=IssueSeverity.WARNING,
|
||||
)
|
||||
if not node.is_controller_node:
|
||||
issue_id = f"device_config_file_changed.{device.id}"
|
||||
if await node.async_has_device_config_changed():
|
||||
device_name = device.name_by_user or device.name or "Unnamed device"
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
data={"device_id": device.id, "device_name": device_name},
|
||||
is_fixable=True,
|
||||
is_persistent=False,
|
||||
translation_key="device_config_file_changed",
|
||||
translation_placeholders={"device_name": device_name},
|
||||
severity=IssueSeverity.WARNING,
|
||||
)
|
||||
else:
|
||||
# Clear any existing repair issue if the device config is not considered
|
||||
# changed. This can happen when the original issue was created by
|
||||
# an upstream bug, or the change has been reverted.
|
||||
async_delete_issue(self.hass, DOMAIN, issue_id)
|
||||
|
||||
async def async_handle_discovery_info(
|
||||
self,
|
||||
|
||||
@@ -654,6 +654,7 @@ DISCOVERY_SCHEMAS: list[NewZWaveDiscoverySchema] = [
|
||||
key=NOTIFICATION_SMOKE_ALARM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
not_states={
|
||||
0,
|
||||
SmokeAlarmNotificationEvent.SENSOR_STATUS_SMOKE_DETECTED_LOCATION_PROVIDED,
|
||||
SmokeAlarmNotificationEvent.SENSOR_STATUS_SMOKE_DETECTED,
|
||||
SmokeAlarmNotificationEvent.MAINTENANCE_STATUS_REPLACEMENT_REQUIRED,
|
||||
|
||||
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2026
MINOR_VERSION: Final = 1
PATCH_VERSION: Final = "0b3"
PATCH_VERSION: Final = "2"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 2)

homeassistant/generated/bluetooth.py (generated, 1 change)
@@ -323,6 +323,7 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [
|
||||
"connectable": True,
|
||||
"domain": "hue_ble",
|
||||
"service_data_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb",
|
||||
"service_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb",
|
||||
},
|
||||
{
|
||||
"connectable": True,
|
||||
|
||||
@@ -537,7 +537,7 @@ def _validate_range[_T: dict[str, Any]](
|
||||
|
||||
_NUMBER_OR_ENTITY_CHOOSE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("chosen_selector"): vol.In(["number", "entity"]),
|
||||
vol.Required("active_choice"): vol.In(["number", "entity"]),
|
||||
vol.Optional("entity"): cv.entity_id,
|
||||
vol.Optional("number"): vol.Coerce(float),
|
||||
}
|
||||
@@ -548,7 +548,7 @@ def _validate_number_or_entity(value: dict | float | str) -> float | str:
|
||||
"""Validate number or entity selector result."""
|
||||
if isinstance(value, dict):
|
||||
_NUMBER_OR_ENTITY_CHOOSE_SCHEMA(value)
|
||||
return value[value["chosen_selector"]] # type: ignore[no-any-return]
|
||||
return value[value["active_choice"]] # type: ignore[no-any-return]
|
||||
return value
|
||||
|
||||
|
||||
@@ -683,7 +683,7 @@ NUMERICAL_ATTRIBUTE_CROSSED_THRESHOLD_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA.exten
|
||||
),
|
||||
vol.Optional(CONF_LOWER_LIMIT): _number_or_entity,
|
||||
vol.Optional(CONF_UPPER_LIMIT): _number_or_entity,
|
||||
vol.Required(CONF_THRESHOLD_TYPE): ThresholdType,
|
||||
vol.Required(CONF_THRESHOLD_TYPE): vol.Coerce(ThresholdType),
|
||||
},
|
||||
_validate_range(CONF_LOWER_LIMIT, CONF_UPPER_LIMIT),
|
||||
_validate_limits_for_threshold_type,
|
||||
|
||||
@@ -39,8 +39,8 @@ habluetooth==5.8.0
|
||||
hass-nabucasa==1.7.0
|
||||
hassil==3.5.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
home-assistant-frontend==20251229.0
|
||||
home-assistant-intents==2026.1.1
|
||||
home-assistant-frontend==20260107.2
|
||||
home-assistant-intents==2026.1.6
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
Jinja2==3.1.6
|
||||
@@ -53,10 +53,10 @@ Pillow==12.0.0
|
||||
propcache==0.4.1
|
||||
psutil-home-assistant==0.0.1
|
||||
PyJWT==2.10.1
|
||||
PyNaCl==1.6.0
|
||||
pymicro-vad==1.0.1
|
||||
PyNaCl==1.6.2
|
||||
pyOpenSSL==25.3.0
|
||||
pyserial==3.5
|
||||
pysilero-vad==3.0.1
|
||||
pyspeex-noise==1.0.2
|
||||
python-slugify==8.0.4
|
||||
PyTurboJPEG==1.8.0
|
||||
@@ -70,9 +70,9 @@ typing-extensions>=4.15.0,<5.0
|
||||
ulid-transform==1.5.2
|
||||
urllib3>=2.0
|
||||
uv==0.9.17
|
||||
voluptuous-openapi==0.3.0
|
||||
voluptuous-openapi==0.2.0
|
||||
voluptuous-serialize==2.7.0
|
||||
voluptuous==0.16.0
|
||||
voluptuous==0.15.2
|
||||
webrtc-models==0.3.0
|
||||
yarl==1.22.0
|
||||
zeroconf==0.148.0
|
||||
@@ -226,3 +226,6 @@ gql<4.0.0
|
||||
|
||||
# Pin pytest-rerunfailures to prevent accidental breaks
|
||||
pytest-rerunfailures==16.0.1
|
||||
|
||||
# Fixes detected blocking call to load_default_certs https://github.com/home-assistant/core/issues/157475
|
||||
aiomqtt>=2.5.0
|
||||
|
||||
@@ -9,8 +9,6 @@ import logging
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
from packaging.requirements import Requirement
|
||||
|
||||
from .core import HomeAssistant, callback
|
||||
from .exceptions import HomeAssistantError
|
||||
from .helpers import singleton
|
||||
@@ -260,8 +258,13 @@ class RequirementsManager:
|
||||
"""
|
||||
if DEPRECATED_PACKAGES or self.hass.config.skip_pip_packages:
|
||||
all_requirements = {
|
||||
requirement_string: Requirement(requirement_string)
|
||||
requirement_string: requirement_details
|
||||
for requirement_string in requirements
|
||||
if (
|
||||
requirement_details := pkg_util.parse_requirement_safe(
|
||||
requirement_string
|
||||
)
|
||||
)
|
||||
}
|
||||
if DEPRECATED_PACKAGES:
|
||||
for requirement_string, requirement_details in all_requirements.items():
|
||||
@@ -272,9 +275,12 @@ class RequirementsManager:
|
||||
"" if is_built_in else "custom ",
|
||||
name,
|
||||
f"has requirement '{requirement_string}' which {reason}",
|
||||
f"This will stop working in Home Assistant {breaks_in_ha_version}, please"
|
||||
if breaks_in_ha_version
|
||||
else "Please",
|
||||
(
|
||||
"This will stop working in Home Assistant "
|
||||
f"{breaks_in_ha_version}, please"
|
||||
if breaks_in_ha_version
|
||||
else "Please"
|
||||
),
|
||||
async_suggest_report_issue(
|
||||
self.hass, integration_domain=name
|
||||
),
|
||||
|
||||
@@ -44,6 +44,39 @@ def get_installed_versions(specifiers: set[str]) -> set[str]:
|
||||
return {specifier for specifier in specifiers if is_installed(specifier)}
|
||||
|
||||
|
||||
def parse_requirement_safe(requirement_str: str) -> Requirement | None:
|
||||
"""Parse a requirement string into a Requirement object.
|
||||
|
||||
expected input is a pip compatible package specifier (requirement string)
|
||||
e.g. "package==1.0.0" or "package>=1.0.0,<2.0.0" or "package@git+https://..."
|
||||
|
||||
For backward compatibility, it also accepts a URL with a fragment
|
||||
e.g. "git+https://github.com/pypa/pip#pip>=1"
|
||||
|
||||
Returns None on a badly-formed requirement string.
|
||||
"""
|
||||
try:
|
||||
return Requirement(requirement_str)
|
||||
except InvalidRequirement:
|
||||
if "#" not in requirement_str:
|
||||
_LOGGER.error("Invalid requirement '%s'", requirement_str)
|
||||
return None
|
||||
|
||||
# This is likely a URL with a fragment
|
||||
# example: git+https://github.com/pypa/pip#pip>=1
|
||||
|
||||
# fragment support was originally used to install zip files, and
|
||||
# we no longer do this in Home Assistant. However, custom
|
||||
# components started using it to install packages from git
|
||||
# urls, which would make it a breaking change to
|
||||
# remove it.
|
||||
try:
|
||||
return Requirement(urlparse(requirement_str).fragment)
|
||||
except InvalidRequirement:
|
||||
_LOGGER.error("Invalid requirement '%s'", requirement_str)
|
||||
return None
|
||||
|
||||
|
||||
def is_installed(requirement_str: str) -> bool:
|
||||
"""Check if a package is installed and will be loaded when we import it.
|
||||
|
||||
@@ -56,26 +89,8 @@ def is_installed(requirement_str: str) -> bool:
|
||||
Returns True when the requirement is met.
|
||||
Returns False when the package is not installed or doesn't meet req.
|
||||
"""
|
||||
try:
|
||||
req = Requirement(requirement_str)
|
||||
except InvalidRequirement:
|
||||
if "#" not in requirement_str:
|
||||
_LOGGER.error("Invalid requirement '%s'", requirement_str)
|
||||
return False
|
||||
|
||||
# This is likely a URL with a fragment
|
||||
# example: git+https://github.com/pypa/pip#pip>=1
|
||||
|
||||
# fragment support was originally used to install zip files, and
|
||||
# we no longer do this in Home Assistant. However, custom
|
||||
# components started using it to install packages from git
|
||||
# urls, which would make it a breaking change to
|
||||
# remove it.
|
||||
try:
|
||||
req = Requirement(urlparse(requirement_str).fragment)
|
||||
except InvalidRequirement:
|
||||
_LOGGER.error("Invalid requirement '%s'", requirement_str)
|
||||
return False
|
||||
if (req := parse_requirement_safe(requirement_str)) is None:
|
||||
return False
|
||||
|
||||
try:
|
||||
if (installed_version := version(req.name)) is None:
|
||||
|
||||
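The refactor above moves the requirement parsing, including the legacy URL-with-fragment fallback, into a reusable parse_requirement_safe helper. A self-contained sketch of the same fallback using only packaging and the standard library follows; the logger name and the example strings are illustrative.

import logging
from urllib.parse import urlparse

from packaging.requirements import InvalidRequirement, Requirement

_LOGGER = logging.getLogger(__name__)


def parse_requirement_safe(requirement_str: str) -> Requirement | None:
    """Parse a pip-style requirement, accepting legacy 'url#name>=x' strings."""
    try:
        return Requirement(requirement_str)
    except InvalidRequirement:
        if "#" not in requirement_str:
            _LOGGER.error("Invalid requirement '%s'", requirement_str)
            return None
        try:
            # e.g. "git+https://github.com/pypa/pip#pip>=1" -> fragment "pip>=1"
            return Requirement(urlparse(requirement_str).fragment)
        except InvalidRequirement:
            _LOGGER.error("Invalid requirement '%s'", requirement_str)
            return None


print(parse_requirement_safe("package>=1.0.0,<2.0.0"))
print(parse_requirement_safe("git+https://github.com/pypa/pip#pip>=1"))
print(parse_requirement_safe("this is not a requirement"))  # None
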
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "homeassistant"
|
||||
version = "2026.1.0b3"
|
||||
version = "2026.1.2"
|
||||
license = "Apache-2.0"
|
||||
license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"]
|
||||
description = "Open-source home automation platform running on Python 3."
|
||||
@@ -76,9 +76,9 @@ dependencies = [
|
||||
"ulid-transform==1.5.2",
|
||||
"urllib3>=2.0",
|
||||
"uv==0.9.17",
|
||||
"voluptuous==0.16.0",
|
||||
"voluptuous==0.15.2",
|
||||
"voluptuous-serialize==2.7.0",
|
||||
"voluptuous-openapi==0.3.0",
|
||||
"voluptuous-openapi==0.2.0",
|
||||
"yarl==1.22.0",
|
||||
"webrtc-models==0.3.0",
|
||||
"zeroconf==0.148.0",
|
||||
|
||||
requirements.txt (generated, 8 changes)
@@ -27,7 +27,7 @@ ha-ffmpeg==3.2.2
|
||||
hass-nabucasa==1.7.0
|
||||
hassil==3.5.0
|
||||
home-assistant-bluetooth==1.13.1
|
||||
home-assistant-intents==2026.1.1
|
||||
home-assistant-intents==2026.1.6
|
||||
httpx==0.28.1
|
||||
ifaddr==0.2.0
|
||||
Jinja2==3.1.6
|
||||
@@ -39,8 +39,8 @@ Pillow==12.0.0
|
||||
propcache==0.4.1
|
||||
psutil-home-assistant==0.0.1
|
||||
PyJWT==2.10.1
|
||||
pymicro-vad==1.0.1
|
||||
pyOpenSSL==25.3.0
|
||||
pysilero-vad==3.0.1
|
||||
pyspeex-noise==1.0.2
|
||||
python-slugify==8.0.4
|
||||
PyTurboJPEG==1.8.0
|
||||
@@ -54,9 +54,9 @@ typing-extensions>=4.15.0,<5.0
|
||||
ulid-transform==1.5.2
|
||||
urllib3>=2.0
|
||||
uv==0.9.17
|
||||
voluptuous-openapi==0.3.0
|
||||
voluptuous-openapi==0.2.0
|
||||
voluptuous-serialize==2.7.0
|
||||
voluptuous==0.16.0
|
||||
voluptuous==0.15.2
|
||||
webrtc-models==0.3.0
|
||||
yarl==1.22.0
|
||||
zeroconf==0.148.0
|
||||
|
||||
requirements_all.txt (generated, 66 changes)
@@ -70,7 +70,7 @@ PyMicroBot==0.0.23
|
||||
|
||||
# homeassistant.components.mobile_app
|
||||
# homeassistant.components.owntracks
|
||||
PyNaCl==1.6.0
|
||||
PyNaCl==1.6.2
|
||||
|
||||
# homeassistant.auth.mfa_modules.totp
|
||||
# homeassistant.components.homekit
|
||||
@@ -80,7 +80,7 @@ PyQRCode==1.2.1
|
||||
PyRMVtransport==0.3.3
|
||||
|
||||
# homeassistant.components.sunricher_dali
|
||||
PySrDaliGateway==0.18.0
|
||||
PySrDaliGateway==0.19.3
|
||||
|
||||
# homeassistant.components.switchbot
|
||||
PySwitchbot==0.75.0
|
||||
@@ -187,7 +187,7 @@ aioairq==0.4.7
|
||||
aioairzone-cloud==0.7.2
|
||||
|
||||
# homeassistant.components.airzone
|
||||
aioairzone==1.0.4
|
||||
aioairzone==1.0.5
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==11.0.2
|
||||
@@ -319,7 +319,7 @@ aiolookin==1.0.0
|
||||
aiolyric==2.0.2
|
||||
|
||||
# homeassistant.components.mealie
|
||||
aiomealie==1.1.1
|
||||
aiomealie==1.2.0
|
||||
|
||||
# homeassistant.components.modern_forms
|
||||
aiomodernforms==0.1.8
|
||||
@@ -703,7 +703,7 @@ brottsplatskartan==1.0.5
|
||||
brunt==1.2.0
|
||||
|
||||
# homeassistant.components.bthome
|
||||
bthome-ble==3.17.0
|
||||
bthome-ble==3.16.0
|
||||
|
||||
# homeassistant.components.bt_home_hub_5
|
||||
bthomehub5-devicelist==0.1.1
|
||||
@@ -782,7 +782,7 @@ debugpy==1.8.17
|
||||
decora-wifi==1.4
|
||||
|
||||
# homeassistant.components.ecovacs
|
||||
deebot-client==17.0.0
|
||||
deebot-client==17.0.1
|
||||
|
||||
# homeassistant.components.ihc
|
||||
# homeassistant.components.namecheapdns
|
||||
@@ -854,7 +854,7 @@ ecoaliface==0.4.0
|
||||
egauge-async==0.4.0
|
||||
|
||||
# homeassistant.components.eheimdigital
|
||||
eheimdigital==1.4.0
|
||||
eheimdigital==1.5.0
|
||||
|
||||
# homeassistant.components.ekeybionyx
|
||||
ekey-bionyxpy==1.0.1
|
||||
@@ -1011,7 +1011,7 @@ freebox-api==1.2.2
|
||||
freesms==0.2.0
|
||||
|
||||
# homeassistant.components.fressnapf_tracker
|
||||
fressnapftracker==0.2.0
|
||||
fressnapftracker==0.2.1
|
||||
|
||||
# homeassistant.components.fritz
|
||||
# homeassistant.components.fritzbox_callmonitor
|
||||
@@ -1105,7 +1105,7 @@ google-nest-sdm==9.1.2
|
||||
google-photos-library-api==0.12.1
|
||||
|
||||
# homeassistant.components.google_air_quality
|
||||
google_air_quality_api==2.0.2
|
||||
google_air_quality_api==2.1.2
|
||||
|
||||
# homeassistant.components.slide
|
||||
# homeassistant.components.slide_local
|
||||
@@ -1127,7 +1127,7 @@ gpiozero==1.6.2
|
||||
gps3==0.33.3
|
||||
|
||||
# homeassistant.components.gree
|
||||
greeclimate==2.1.0
|
||||
greeclimate==2.1.1
|
||||
|
||||
# homeassistant.components.greeneye_monitor
|
||||
greeneye_monitor==3.0.3
|
||||
@@ -1213,10 +1213,10 @@ hole==0.9.0
|
||||
holidays==0.84
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20251229.0
|
||||
home-assistant-frontend==20260107.2
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2026.1.1
|
||||
home-assistant-intents==2026.1.6
|
||||
|
||||
# homeassistant.components.gentex_homelink
|
||||
homelink-integration-api==0.0.1
|
||||
@@ -1282,7 +1282,7 @@ imeon_inverter_api==0.4.0
|
||||
imgw_pib==1.6.0
|
||||
|
||||
# homeassistant.components.incomfort
|
||||
incomfort-client==0.6.10
|
||||
incomfort-client==0.6.11
|
||||
|
||||
# homeassistant.components.influxdb
|
||||
influxdb-client==1.48.0
|
||||
@@ -1349,7 +1349,7 @@ kiwiki-client==0.1.1
|
||||
knocki==0.4.2
|
||||
|
||||
# homeassistant.components.knx
|
||||
knx-frontend==2025.12.30.151231
|
||||
knx-frontend==2026.1.15.112308
|
||||
|
||||
# homeassistant.components.konnected
|
||||
konnected==1.2.0
|
||||
@@ -1684,7 +1684,7 @@ openwrt-luci-rpc==1.1.17
|
||||
openwrt-ubus-rpc==0.0.2
|
||||
|
||||
# homeassistant.components.opower
|
||||
opower==0.15.9
|
||||
opower==0.16.3
|
||||
|
||||
# homeassistant.components.oralb
|
||||
oralb-ble==1.0.2
|
||||
@@ -1855,7 +1855,7 @@ pyElectra==1.2.4
|
||||
pyEmby==1.10
|
||||
|
||||
# homeassistant.components.hikvision
|
||||
pyHik==0.3.4
|
||||
pyHik==0.4.0
|
||||
|
||||
# homeassistant.components.homee
|
||||
pyHomee==1.3.8
|
||||
@@ -1867,7 +1867,7 @@ pyRFXtrx==0.31.1
|
||||
pySDCP==1
|
||||
|
||||
# homeassistant.components.tibber
|
||||
pyTibber==0.33.1
|
||||
pyTibber==0.34.4
|
||||
|
||||
# homeassistant.components.dlink
|
||||
pyW215==0.8.0
|
||||
@@ -1931,7 +1931,7 @@ pyblu==2.0.5
|
||||
pybotvac==0.0.28
|
||||
|
||||
# homeassistant.components.braviatv
|
||||
pybravia==0.3.4
|
||||
pybravia==0.4.1
|
||||
|
||||
# homeassistant.components.nissan_leaf
|
||||
pycarwings2==2.14
|
||||
@@ -2022,7 +2022,7 @@ pyegps==0.2.5
|
||||
pyemoncms==0.1.3
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==2.4.2
|
||||
pyenphase==2.4.3
|
||||
|
||||
# homeassistant.components.envisalink
|
||||
pyenvisalink==4.7
|
||||
@@ -2201,6 +2201,9 @@ pymediaroom==0.6.5.4
|
||||
# homeassistant.components.meteoclimatic
|
||||
pymeteoclimatic==0.1.0
|
||||
|
||||
# homeassistant.components.assist_pipeline
|
||||
pymicro-vad==1.0.1
|
||||
|
||||
# homeassistant.components.miele
|
||||
pymiele==0.6.1
|
||||
|
||||
@@ -2238,7 +2241,7 @@ pynina==0.3.6
|
||||
pynintendoauth==1.0.2
|
||||
|
||||
# homeassistant.components.nintendo_parental_controls
|
||||
pynintendoparental==2.3.0
|
||||
pynintendoparental==2.3.2
|
||||
|
||||
# homeassistant.components.nobo_hub
|
||||
pynobo==1.8.1
|
||||
@@ -2291,7 +2294,7 @@ pyotgw==2.2.2
|
||||
pyotp==2.9.0
|
||||
|
||||
# homeassistant.components.overkiz
|
||||
pyoverkiz==1.19.3
|
||||
pyoverkiz==1.19.4
|
||||
|
||||
# homeassistant.components.palazzetti
|
||||
pypalazzetti==0.1.20
|
||||
@@ -2408,14 +2411,11 @@ pysiaalarm==3.1.1
|
||||
# homeassistant.components.signal_messenger
|
||||
pysignalclirestapi==0.3.24
|
||||
|
||||
# homeassistant.components.assist_pipeline
|
||||
pysilero-vad==3.0.1
|
||||
|
||||
# homeassistant.components.sky_hub
|
||||
pyskyqhub==0.1.4
|
||||
|
||||
# homeassistant.components.sma
|
||||
pysma==1.0.2
|
||||
pysma==1.1.0
|
||||
|
||||
# homeassistant.components.smappee
|
||||
pysmappee==0.2.29
|
||||
@@ -2520,7 +2520,7 @@ python-google-weather-api==0.0.4
|
||||
python-homeassistant-analytics==0.9.0
|
||||
|
||||
# homeassistant.components.homewizard
|
||||
python-homewizard-energy==10.0.0
|
||||
python-homewizard-energy==10.0.1
|
||||
|
||||
# homeassistant.components.hp_ilo
|
||||
python-hpilo==4.4.3
|
||||
@@ -2563,7 +2563,7 @@ python-opensky==1.0.1
|
||||
|
||||
# homeassistant.components.otbr
|
||||
# homeassistant.components.thread
|
||||
python-otbr-api==2.7.0
|
||||
python-otbr-api==2.7.1
|
||||
|
||||
# homeassistant.components.overseerr
|
||||
python-overseerr==0.8.0
|
||||
@@ -2581,7 +2581,7 @@ python-rabbitair==0.0.8
|
||||
python-ripple-api==0.0.3
|
||||
|
||||
# homeassistant.components.roborock
|
||||
python-roborock==4.2.0
|
||||
python-roborock==4.2.1
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.46
|
||||
@@ -2593,7 +2593,7 @@ python-snoo==0.8.3
|
||||
python-songpal==0.16.2
|
||||
|
||||
# homeassistant.components.tado
|
||||
python-tado==0.18.15
|
||||
python-tado==0.18.16
|
||||
|
||||
# homeassistant.components.technove
|
||||
python-technove==2.0.0
|
||||
@@ -2842,7 +2842,7 @@ sentry-sdk==1.45.1
|
||||
|
||||
# homeassistant.components.homeassistant_hardware
|
||||
# homeassistant.components.zha
|
||||
serialx==0.5.0
|
||||
serialx==0.6.2
|
||||
|
||||
# homeassistant.components.sfr_box
|
||||
sfrbox-api==0.1.0
|
||||
@@ -2896,7 +2896,7 @@ solaredge-local==0.2.3
|
||||
solaredge-web==0.0.1
|
||||
|
||||
# homeassistant.components.solarlog
|
||||
solarlog_cli==0.6.1
|
||||
solarlog_cli==0.7.0
|
||||
|
||||
# homeassistant.components.solax
|
||||
solax==3.2.3
|
||||
@@ -3078,7 +3078,7 @@ typedmonarchmoney==0.4.4
|
||||
uasiren==0.0.1
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==7.33.3
|
||||
uiprotect==8.0.0
|
||||
|
||||
# homeassistant.components.landisgyr_heat_meter
|
||||
ultraheat-api==0.5.7
|
||||
@@ -3277,7 +3277,7 @@ zeroconf==0.148.0
|
||||
zeversolar==0.3.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha==0.0.82
|
||||
zha==0.0.84
|
||||
|
||||
# homeassistant.components.zhong_hong
|
||||
zhong-hong-hvac==1.0.13
|
||||
|
||||
requirements_test_all.txt (generated, 66 changes)
@@ -70,7 +70,7 @@ PyMicroBot==0.0.23
|
||||
|
||||
# homeassistant.components.mobile_app
|
||||
# homeassistant.components.owntracks
|
||||
PyNaCl==1.6.0
|
||||
PyNaCl==1.6.2
|
||||
|
||||
# homeassistant.auth.mfa_modules.totp
|
||||
# homeassistant.components.homekit
|
||||
@@ -80,7 +80,7 @@ PyQRCode==1.2.1
|
||||
PyRMVtransport==0.3.3
|
||||
|
||||
# homeassistant.components.sunricher_dali
|
||||
PySrDaliGateway==0.18.0
|
||||
PySrDaliGateway==0.19.3
|
||||
|
||||
# homeassistant.components.switchbot
|
||||
PySwitchbot==0.75.0
|
||||
@@ -178,7 +178,7 @@ aioairq==0.4.7
|
||||
aioairzone-cloud==0.7.2
|
||||
|
||||
# homeassistant.components.airzone
|
||||
aioairzone==1.0.4
|
||||
aioairzone==1.0.5
|
||||
|
||||
# homeassistant.components.alexa_devices
|
||||
aioamazondevices==11.0.2
|
||||
@@ -304,7 +304,7 @@ aiolookin==1.0.0
|
||||
aiolyric==2.0.2
|
||||
|
||||
# homeassistant.components.mealie
|
||||
aiomealie==1.1.1
|
||||
aiomealie==1.2.0
|
||||
|
||||
# homeassistant.components.modern_forms
|
||||
aiomodernforms==0.1.8
|
||||
@@ -633,7 +633,7 @@ brottsplatskartan==1.0.5
|
||||
brunt==1.2.0
|
||||
|
||||
# homeassistant.components.bthome
|
||||
bthome-ble==3.17.0
|
||||
bthome-ble==3.16.0
|
||||
|
||||
# homeassistant.components.buienradar
|
||||
buienradar==1.0.6
|
||||
@@ -691,7 +691,7 @@ dbus-fast==3.1.2
|
||||
debugpy==1.8.17
|
||||
|
||||
# homeassistant.components.ecovacs
|
||||
deebot-client==17.0.0
|
||||
deebot-client==17.0.1
|
||||
|
||||
# homeassistant.components.ihc
|
||||
# homeassistant.components.namecheapdns
|
||||
@@ -754,7 +754,7 @@ easyenergy==2.1.2
|
||||
egauge-async==0.4.0
|
||||
|
||||
# homeassistant.components.eheimdigital
|
||||
eheimdigital==1.4.0
|
||||
eheimdigital==1.5.0
|
||||
|
||||
# homeassistant.components.ekeybionyx
|
||||
ekey-bionyxpy==1.0.1
|
||||
@@ -890,7 +890,7 @@ forecast-solar==4.2.0
|
||||
freebox-api==1.2.2
|
||||
|
||||
# homeassistant.components.fressnapf_tracker
|
||||
fressnapftracker==0.2.0
|
||||
fressnapftracker==0.2.1
|
||||
|
||||
# homeassistant.components.fritz
|
||||
# homeassistant.components.fritzbox_callmonitor
|
||||
@@ -981,7 +981,7 @@ google-nest-sdm==9.1.2
|
||||
google-photos-library-api==0.12.1
|
||||
|
||||
# homeassistant.components.google_air_quality
|
||||
google_air_quality_api==2.0.2
|
||||
google_air_quality_api==2.1.2
|
||||
|
||||
# homeassistant.components.slide
|
||||
# homeassistant.components.slide_local
|
||||
@@ -1000,7 +1000,7 @@ govee-local-api==2.3.0
|
||||
gps3==0.33.3
|
||||
|
||||
# homeassistant.components.gree
|
||||
greeclimate==2.1.0
|
||||
greeclimate==2.1.1
|
||||
|
||||
# homeassistant.components.greeneye_monitor
|
||||
greeneye_monitor==3.0.3
|
||||
@@ -1071,10 +1071,10 @@ hole==0.9.0
|
||||
holidays==0.84
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20251229.0
|
||||
home-assistant-frontend==20260107.2
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2026.1.1
|
||||
home-assistant-intents==2026.1.6
|
||||
|
||||
# homeassistant.components.gentex_homelink
|
||||
homelink-integration-api==0.0.1
|
||||
@@ -1128,7 +1128,7 @@ imeon_inverter_api==0.4.0
|
||||
imgw_pib==1.6.0
|
||||
|
||||
# homeassistant.components.incomfort
|
||||
incomfort-client==0.6.10
|
||||
incomfort-client==0.6.11
|
||||
|
||||
# homeassistant.components.influxdb
|
||||
influxdb-client==1.48.0
|
||||
@@ -1183,7 +1183,7 @@ kegtron-ble==1.0.2
|
||||
knocki==0.4.2
|
||||
|
||||
# homeassistant.components.knx
|
||||
knx-frontend==2025.12.30.151231
|
||||
knx-frontend==2026.1.15.112308
|
||||
|
||||
# homeassistant.components.konnected
|
||||
konnected==1.2.0
|
||||
@@ -1455,7 +1455,7 @@ openrgb-python==0.3.6
|
||||
openwebifpy==4.3.1
|
||||
|
||||
# homeassistant.components.opower
|
||||
opower==0.15.9
|
||||
opower==0.16.3
|
||||
|
||||
# homeassistant.components.oralb
|
||||
oralb-ble==1.0.2
|
||||
@@ -1586,7 +1586,7 @@ pyDuotecno==2024.10.1
|
||||
pyElectra==1.2.4
|
||||
|
||||
# homeassistant.components.hikvision
|
||||
pyHik==0.3.4
|
||||
pyHik==0.4.0
|
||||
|
||||
# homeassistant.components.homee
|
||||
pyHomee==1.3.8
|
||||
@@ -1595,7 +1595,7 @@ pyHomee==1.3.8
|
||||
pyRFXtrx==0.31.1
|
||||
|
||||
# homeassistant.components.tibber
|
||||
pyTibber==0.33.1
|
||||
pyTibber==0.34.4
|
||||
|
||||
# homeassistant.components.dlink
|
||||
pyW215==0.8.0
|
||||
@@ -1650,7 +1650,7 @@ pyblu==2.0.5
|
||||
pybotvac==0.0.28
|
||||
|
||||
# homeassistant.components.braviatv
|
||||
pybravia==0.3.4
|
||||
pybravia==0.4.1
|
||||
|
||||
# homeassistant.components.cloudflare
|
||||
pycfdns==3.0.0
|
||||
@@ -1714,7 +1714,7 @@ pyegps==0.2.5
|
||||
pyemoncms==0.1.3
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
pyenphase==2.4.2
|
||||
pyenphase==2.4.3
|
||||
|
||||
# homeassistant.components.everlights
|
||||
pyeverlights==0.1.0
|
||||
@@ -1860,6 +1860,9 @@ pymata-express==1.19
|
||||
# homeassistant.components.meteoclimatic
|
||||
pymeteoclimatic==0.1.0
|
||||
|
||||
# homeassistant.components.assist_pipeline
|
||||
pymicro-vad==1.0.1
|
||||
|
||||
# homeassistant.components.miele
|
||||
pymiele==0.6.1
|
||||
|
||||
@@ -1888,7 +1891,7 @@ pynina==0.3.6
|
||||
pynintendoauth==1.0.2
|
||||
|
||||
# homeassistant.components.nintendo_parental_controls
|
||||
pynintendoparental==2.3.0
|
||||
pynintendoparental==2.3.2
|
||||
|
||||
# homeassistant.components.nobo_hub
|
||||
pynobo==1.8.1
|
||||
@@ -1935,7 +1938,7 @@ pyotgw==2.2.2
|
||||
pyotp==2.9.0
|
||||
|
||||
# homeassistant.components.overkiz
|
||||
pyoverkiz==1.19.3
|
||||
pyoverkiz==1.19.4
|
||||
|
||||
# homeassistant.components.palazzetti
|
||||
pypalazzetti==0.1.20
|
||||
@@ -2031,11 +2034,8 @@ pysiaalarm==3.1.1
|
||||
# homeassistant.components.signal_messenger
|
||||
pysignalclirestapi==0.3.24
|
||||
|
||||
# homeassistant.components.assist_pipeline
|
||||
pysilero-vad==3.0.1
|
||||
|
||||
# homeassistant.components.sma
|
||||
pysma==1.0.2
|
||||
pysma==1.1.0
|
||||
|
||||
# homeassistant.components.smappee
|
||||
pysmappee==0.2.29
|
||||
@@ -2113,7 +2113,7 @@ python-google-weather-api==0.0.4
|
||||
python-homeassistant-analytics==0.9.0
|
||||
|
||||
# homeassistant.components.homewizard
|
||||
python-homewizard-energy==10.0.0
|
||||
python-homewizard-energy==10.0.1
|
||||
|
||||
# homeassistant.components.izone
|
||||
python-izone==1.2.9
|
||||
@@ -2150,7 +2150,7 @@ python-opensky==1.0.1
|
||||
|
||||
# homeassistant.components.otbr
|
||||
# homeassistant.components.thread
|
||||
python-otbr-api==2.7.0
|
||||
python-otbr-api==2.7.1
|
||||
|
||||
# homeassistant.components.overseerr
|
||||
python-overseerr==0.8.0
|
||||
@@ -2165,7 +2165,7 @@ python-pooldose==0.8.1
|
||||
python-rabbitair==0.0.8
|
||||
|
||||
# homeassistant.components.roborock
|
||||
python-roborock==4.2.0
|
||||
python-roborock==4.2.1
|
||||
|
||||
# homeassistant.components.smarttub
|
||||
python-smarttub==0.0.46
|
||||
@@ -2177,7 +2177,7 @@ python-snoo==0.8.3
|
||||
python-songpal==0.16.2
|
||||
|
||||
# homeassistant.components.tado
|
||||
python-tado==0.18.15
|
||||
python-tado==0.18.16
|
||||
|
||||
# homeassistant.components.technove
|
||||
python-technove==2.0.0
|
||||
@@ -2381,7 +2381,7 @@ sentry-sdk==1.45.1
|
||||
|
||||
# homeassistant.components.homeassistant_hardware
|
||||
# homeassistant.components.zha
|
||||
serialx==0.5.0
|
||||
serialx==0.6.2
|
||||
|
||||
# homeassistant.components.sfr_box
|
||||
sfrbox-api==0.1.0
|
||||
@@ -2420,7 +2420,7 @@ soco==0.30.13
|
||||
solaredge-web==0.0.1
|
||||
|
||||
# homeassistant.components.solarlog
|
||||
solarlog_cli==0.6.1
|
||||
solarlog_cli==0.7.0
|
||||
|
||||
# homeassistant.components.solax
|
||||
solax==3.2.3
|
||||
@@ -2569,7 +2569,7 @@ typedmonarchmoney==0.4.4
|
||||
uasiren==0.0.1
|
||||
|
||||
# homeassistant.components.unifiprotect
|
||||
uiprotect==7.33.3
|
||||
uiprotect==8.0.0
|
||||
|
||||
# homeassistant.components.landisgyr_heat_meter
|
||||
ultraheat-api==0.5.7
|
||||
@@ -2738,7 +2738,7 @@ zeroconf==0.148.0
|
||||
zeversolar==0.3.2
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha==0.0.82
|
||||
zha==0.0.84
|
||||
|
||||
# homeassistant.components.zwave_js
|
||||
zwave-js-server-python==0.67.1
|
||||
|
||||
@@ -217,6 +217,9 @@ gql<4.0.0
|
||||
|
||||
# Pin pytest-rerunfailures to prevent accidental breaks
|
||||
pytest-rerunfailures==16.0.1
|
||||
|
||||
# Fixes detected blocking call to load_default_certs https://github.com/home-assistant/core/issues/157475
|
||||
aiomqtt>=2.5.0
|
||||
"""
|
||||
|
||||
GENERATED_MESSAGE = (
|
||||
|
||||
@@ -2,15 +2,19 @@
|
||||
"""Helper script to bump the current version."""
|
||||
|
||||
import argparse
|
||||
from copy import replace
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
import packaging
|
||||
from packaging.version import Version
|
||||
|
||||
from homeassistant import const
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
_PACKAGING_VERSION_BELOW_26 = Version(packaging.__version__) < Version("26.0dev0")
|
||||
|
||||
|
||||
def _bump_release(release, bump_type):
|
||||
"""Bump a release tuple consisting of 3 numbers."""
|
||||
@@ -25,6 +29,13 @@ def _bump_release(release, bump_type):
|
||||
return major, minor, patch
|
||||
|
||||
|
||||
def _get_dev_change(dev: int) -> int | tuple[str, int]:
|
||||
"""Return the dev change based on packaging version."""
|
||||
if _PACKAGING_VERSION_BELOW_26:
|
||||
return ("dev", dev)
|
||||
return dev
|
||||
|
||||
|
||||
def bump_version(
|
||||
version: Version, bump_type: str, *, nightly_version: str | None = None
|
||||
) -> Version:
|
||||
@@ -58,9 +69,10 @@ def bump_version(
|
||||
# Convert 0.67.3.b5 to 0.67.4.dev0
|
||||
# Convert 0.67.3.dev0 to 0.67.3.dev1
|
||||
if version.is_devrelease:
|
||||
to_change["dev"] = ("dev", version.dev + 1)
|
||||
to_change["dev"] = _get_dev_change(version.dev + 1)
|
||||
else:
|
||||
to_change["pre"] = ("dev", 0)
|
||||
to_change["dev"] = _get_dev_change(0)
|
||||
to_change["pre"] = None
|
||||
to_change["release"] = _bump_release(version.release, "minor")
|
||||
|
||||
elif bump_type == "beta":
|
||||
@@ -99,14 +111,19 @@ def bump_version(
|
||||
raise ValueError("Nightly version must be a dev version")
|
||||
new_dev = new_version.dev
|
||||
|
||||
to_change["dev"] = ("dev", new_dev)
|
||||
if not isinstance(new_dev, int):
|
||||
new_dev = int(new_dev)
|
||||
to_change["dev"] = _get_dev_change(new_dev)
|
||||
|
||||
else:
|
||||
raise ValueError(f"Unsupported type: {bump_type}")
|
||||
|
||||
temp = Version("0")
|
||||
temp._version = version._version._replace(**to_change) # noqa: SLF001
|
||||
return Version(str(temp))
|
||||
if _PACKAGING_VERSION_BELOW_26:
|
||||
temp = Version("0")
|
||||
temp._version = version._version._replace(**to_change) # noqa: SLF001
|
||||
return Version(str(temp))
|
||||
|
||||
return replace(version, **to_change)
|
||||
|
||||
|
||||
def write_version(version):
|
||||
|
||||
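The bump-script change above keys its behaviour on the installed packaging version: the diff keeps the old path that rewrites the private Version._version namedtuple (where dev is stored as a ("dev", n) tuple) and adds a path that uses copy.replace on the Version object for packaging 26 and newer. The public Version attributes it relies on can be checked with a few lines; this is a sketch for reference, not part of the script.

from packaging.version import Version

beta = Version("2026.1.0b3")
print(beta.release, beta.pre, beta.is_prerelease)    # (2026, 1, 0) ('b', 3) True

dev = Version("2026.2.0.dev1")
print(dev.dev, dev.is_devrelease)                    # 1 True

final = Version("2026.1.2")
print(final.release, final.is_prerelease, final.is_devrelease)  # (2026, 1, 2) False False
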
@@ -2,12 +2,13 @@
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from aioairzone.common import OperationMode
|
||||
from aioairzone.common import OperationMode, QAdapt
|
||||
from aioairzone.const import (
|
||||
API_COLD_ANGLE,
|
||||
API_DATA,
|
||||
API_HEAT_ANGLE,
|
||||
API_MODE,
|
||||
API_Q_ADAPT,
|
||||
API_SLEEP,
|
||||
API_SYSTEM_ID,
|
||||
API_ZONE_ID,
|
||||
@@ -17,7 +18,7 @@ import pytest
|
||||
from homeassistant.components.select import ATTR_OPTIONS, DOMAIN as SELECT_DOMAIN
|
||||
from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_SELECT_OPTION
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
|
||||
from .util import async_init_integration
|
||||
|
||||
@@ -27,6 +28,11 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None:
|
||||
|
||||
await async_init_integration(hass)
|
||||
|
||||
# Systems
|
||||
state = hass.states.get("select.system_1_q_adapt")
|
||||
assert state.state == "standard"
|
||||
|
||||
# Zones
|
||||
state = hass.states.get("select.despacho_cold_angle")
|
||||
assert state.state == "90deg"
|
||||
|
||||
@@ -95,6 +101,71 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None:
|
||||
assert state.state == "off"
|
||||
|
||||
|
||||
async def test_airzone_select_sys_qadapt(hass: HomeAssistant) -> None:
|
||||
"""Test select system Q-Adapt."""
|
||||
|
||||
await async_init_integration(hass)
|
||||
|
||||
put_q_adapt = {
|
||||
API_DATA: {
|
||||
API_SYSTEM_ID: 1,
|
||||
API_Q_ADAPT: QAdapt.SILENCE,
|
||||
}
|
||||
}
|
||||
|
||||
with pytest.raises(ServiceValidationError):
|
||||
await hass.services.async_call(
|
||||
SELECT_DOMAIN,
|
||||
SERVICE_SELECT_OPTION,
|
||||
{
|
||||
ATTR_ENTITY_ID: "select.system_1_q_adapt",
|
||||
ATTR_OPTION: "Invalid",
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.put_hvac",
|
||||
return_value=put_q_adapt,
|
||||
):
|
||||
await hass.services.async_call(
|
||||
SELECT_DOMAIN,
|
||||
SERVICE_SELECT_OPTION,
|
||||
{
|
||||
ATTR_ENTITY_ID: "select.system_1_q_adapt",
|
||||
ATTR_OPTION: "silence",
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
state = hass.states.get("select.system_1_q_adapt")
|
||||
assert state.state == "silence"
|
||||
|
||||
put_q_adapt = {
|
||||
API_DATA: {
|
||||
API_SYSTEM_ID: 2,
|
||||
API_Q_ADAPT: QAdapt.SILENCE,
|
||||
}
|
||||
}
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.airzone.AirzoneLocalApi.put_hvac",
|
||||
return_value=put_q_adapt,
|
||||
),
|
||||
pytest.raises(HomeAssistantError),
|
||||
):
|
||||
await hass.services.async_call(
|
||||
SELECT_DOMAIN,
|
||||
SERVICE_SELECT_OPTION,
|
||||
{
|
||||
ATTR_ENTITY_ID: "select.system_1_q_adapt",
|
||||
ATTR_OPTION: "silence",
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
|
||||
async def test_airzone_select_sleep(hass: HomeAssistant) -> None:
|
||||
"""Test select sleep."""
|
||||
|
||||
|
||||
@@ -309,12 +309,12 @@
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'content': '{"success": true, "response": "Lights are off."}',
|
||||
'content': '{"success":true,"response":"Lights are off."}',
|
||||
'tool_use_id': 'mock-tool-call-id',
|
||||
'type': 'tool_result',
|
||||
}),
|
||||
dict({
|
||||
'content': '{"success": false, "response": "Not enough milk."}',
|
||||
'content': '{"success":false,"response":"Not enough milk."}',
|
||||
'tool_use_id': 'mock-tool-call-id-2',
|
||||
'type': 'tool_result',
|
||||
}),
|
||||
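The snapshot updates above drop the spaces inside the serialized tool results, which matches compact JSON separators; the standard-library behaviour is shown below for reference, independent of whichever serializer the integration actually uses.

import json

payload = {"success": True, "response": "Lights are off."}
print(json.dumps(payload))                         # {"success": true, "response": "Lights are off."}
print(json.dumps(payload, separators=(",", ":")))  # {"success":true,"response":"Lights are off."}
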
@@ -462,6 +462,62 @@
|
||||
}),
|
||||
])
|
||||
# ---
|
||||
# name: test_history_conversion[content6]
|
||||
list([
|
||||
dict({
|
||||
'content': 'What time is it?',
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Let me check the time for you.',
|
||||
'type': 'text',
|
||||
}),
|
||||
dict({
|
||||
'id': 'mock-tool-call-id',
|
||||
'input': dict({
|
||||
}),
|
||||
'name': 'GetCurrentTime',
|
||||
'type': 'tool_use',
|
||||
}),
|
||||
]),
|
||||
'role': 'assistant',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'content': '{"speech_slots":{"time":"14:30:00"},"message":"Current time retrieved"}',
|
||||
'tool_use_id': 'mock-tool-call-id',
|
||||
'type': 'tool_result',
|
||||
}),
|
||||
]),
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'It is currently 2:30 PM.',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'role': 'assistant',
|
||||
}),
|
||||
dict({
|
||||
'content': 'Are you sure?',
|
||||
'role': 'user',
|
||||
}),
|
||||
dict({
|
||||
'content': list([
|
||||
dict({
|
||||
'text': 'Yes, I am sure!',
|
||||
'type': 'text',
|
||||
}),
|
||||
]),
|
||||
'role': 'assistant',
|
||||
}),
|
||||
])
|
||||
# ---
|
||||
# name: test_redacted_thinking
|
||||
list([
|
||||
dict({
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Tests for the Anthropic integration."""
|
||||
|
||||
import datetime
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
|
||||
@@ -317,7 +318,7 @@ async def test_function_exception(
|
||||
"role": "user",
|
||||
"content": [
|
||||
{
|
||||
"content": '{"error": "HomeAssistantError", "error_text": "Test tool exception"}',
|
||||
"content": '{"error":"HomeAssistantError","error_text":"Test tool exception"}',
|
||||
"tool_use_id": "toolu_0123456789AbCdEfGhIjKlM",
|
||||
"type": "tool_result",
|
||||
}
|
||||
@@ -893,6 +894,34 @@ async def test_web_search(
|
||||
),
|
||||
),
|
||||
],
|
||||
[
|
||||
conversation.chat_log.SystemContent("You are a helpful assistant."),
|
||||
conversation.chat_log.UserContent("What time is it?"),
|
||||
conversation.chat_log.AssistantContent(
|
||||
agent_id="conversation.claude_conversation",
|
||||
content="Let me check the time for you.",
|
||||
tool_calls=[
|
||||
llm.ToolInput(
|
||||
id="mock-tool-call-id",
|
||||
tool_name="GetCurrentTime",
|
||||
tool_args={},
|
||||
),
|
||||
],
|
||||
),
|
||||
conversation.chat_log.ToolResultContent(
|
||||
agent_id="conversation.claude_conversation",
|
||||
tool_call_id="mock-tool-call-id",
|
||||
tool_name="GetCurrentTime",
|
||||
tool_result={
|
||||
"speech_slots": {"time": datetime.time(14, 30, 0)},
|
||||
"message": "Current time retrieved",
|
||||
},
|
||||
),
|
||||
conversation.chat_log.AssistantContent(
|
||||
agent_id="conversation.claude_conversation",
|
||||
content="It is currently 2:30 PM.",
|
||||
),
|
||||
],
|
||||
],
|
||||
)
|
||||
async def test_history_conversion(
|
||||
|
||||
@@ -2232,6 +2232,202 @@ async def test_extraction_functions(
|
||||
assert automation.blueprint_in_automation(hass, "automation.test3") is None
|
||||
|
||||
|
||||
async def test_extraction_functions_with_targets(
|
||||
hass: HomeAssistant,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
) -> None:
|
||||
"""Test extraction functions with targets in triggers.
|
||||
|
||||
This test verifies that targets specified in trigger configurations
|
||||
(using new-style triggers that support target) are properly extracted for
|
||||
entity, device, area, floor, and label references.
|
||||
"""
|
||||
config_entry = MockConfigEntry(domain="fake_integration", data={})
|
||||
config_entry.mock_state(hass, ConfigEntryState.LOADED)
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
trigger_device = device_registry.async_get_or_create(
|
||||
config_entry_id=config_entry.entry_id,
|
||||
connections={(dr.CONNECTION_NETWORK_MAC, "00:00:00:00:00:01")},
|
||||
)
|
||||
|
||||
await async_setup_component(hass, "homeassistant", {})
|
||||
await async_setup_component(
|
||||
hass, "scene", {"scene": {"name": "test", "entities": {}}}
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Enable the new_triggers_conditions feature flag to allow new-style triggers
|
||||
assert await async_setup_component(hass, "labs", {})
|
||||
ws_client = await hass_ws_client(hass)
|
||||
await ws_client.send_json_auto_id(
|
||||
{
|
||||
"type": "labs/update",
|
||||
"domain": "automation",
|
||||
"preview_feature": "new_triggers_conditions",
|
||||
"enabled": True,
|
||||
}
|
||||
)
|
||||
msg = await ws_client.receive_json()
|
||||
assert msg["success"]
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
DOMAIN,
|
||||
{
|
||||
DOMAIN: [
|
||||
{
|
||||
"alias": "test1",
|
||||
"triggers": [
|
||||
# Single entity_id in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {"entity_id": "scene.target_entity"},
|
||||
},
|
||||
# Multiple entity_ids in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {
|
||||
"entity_id": [
|
||||
"scene.target_entity_list1",
|
||||
"scene.target_entity_list2",
|
||||
]
|
||||
},
|
||||
},
|
||||
# Single device_id in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {"device_id": trigger_device.id},
|
||||
},
|
||||
# Multiple device_ids in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {
|
||||
"device_id": [
|
||||
"target-device-1",
|
||||
"target-device-2",
|
||||
]
|
||||
},
|
||||
},
|
||||
# Single area_id in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {"area_id": "area-target-single"},
|
||||
},
|
||||
# Multiple area_ids in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {"area_id": ["area-target-1", "area-target-2"]},
|
||||
},
|
||||
# Single floor_id in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {"floor_id": "floor-target-single"},
|
||||
},
|
||||
# Multiple floor_ids in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {
|
||||
"floor_id": ["floor-target-1", "floor-target-2"]
|
||||
},
|
||||
},
|
||||
# Single label_id in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {"label_id": "label-target-single"},
|
||||
},
|
||||
# Multiple label_ids in target
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {
|
||||
"label_id": ["label-target-1", "label-target-2"]
|
||||
},
|
||||
},
|
||||
# Combined targets
|
||||
{
|
||||
"trigger": "scene.activated",
|
||||
"target": {
|
||||
"entity_id": "scene.combined_entity",
|
||||
"device_id": "combined-device",
|
||||
"area_id": "combined-area",
|
||||
"floor_id": "combined-floor",
|
||||
"label_id": "combined-label",
|
||||
},
|
||||
},
|
||||
],
|
||||
"conditions": [],
|
||||
"actions": [
|
||||
{
|
||||
"action": "test.script",
|
||||
"data": {"entity_id": "light.action_entity"},
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
# Test entity extraction from trigger targets
|
||||
assert set(automation.entities_in_automation(hass, "automation.test1")) == {
|
||||
"scene.target_entity",
|
||||
"scene.target_entity_list1",
|
||||
"scene.target_entity_list2",
|
||||
"scene.combined_entity",
|
||||
"light.action_entity",
|
||||
}
|
||||
|
||||
# Test device extraction from trigger targets
|
||||
assert set(automation.devices_in_automation(hass, "automation.test1")) == {
|
||||
trigger_device.id,
|
||||
"target-device-1",
|
||||
"target-device-2",
|
||||
"combined-device",
|
||||
}
|
||||
|
||||
# Test area extraction from trigger targets
|
||||
assert set(automation.areas_in_automation(hass, "automation.test1")) == {
|
||||
"area-target-single",
|
||||
"area-target-1",
|
||||
"area-target-2",
|
||||
"combined-area",
|
||||
}
|
||||
|
||||
# Test floor extraction from trigger targets
|
||||
assert set(automation.floors_in_automation(hass, "automation.test1")) == {
|
||||
"floor-target-single",
|
||||
"floor-target-1",
|
||||
"floor-target-2",
|
||||
"combined-floor",
|
||||
}
|
||||
|
||||
# Test label extraction from trigger targets
|
||||
assert set(automation.labels_in_automation(hass, "automation.test1")) == {
|
||||
"label-target-single",
|
||||
"label-target-1",
|
||||
"label-target-2",
|
||||
"combined-label",
|
||||
}
|
||||
|
||||
# Test automations_with_* functions
|
||||
assert set(automation.automations_with_entity(hass, "scene.target_entity")) == {
|
||||
"automation.test1"
|
||||
}
|
||||
assert set(automation.automations_with_device(hass, trigger_device.id)) == {
|
||||
"automation.test1"
|
||||
}
|
||||
assert set(automation.automations_with_area(hass, "area-target-single")) == {
|
||||
"automation.test1"
|
||||
}
|
||||
assert set(automation.automations_with_floor(hass, "floor-target-single")) == {
|
||||
"automation.test1"
|
||||
}
|
||||
assert set(automation.automations_with_label(hass, "label-target-single")) == {
|
||||
"automation.test1"
|
||||
}
|
||||
|
||||
|
||||
async def test_logbook_humanify_automation_triggered_event(hass: HomeAssistant) -> None:
|
||||
"""Test humanifying Automation Trigger event."""
|
||||
hass.config.components.add("recorder")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Backblaze B2 backup agent tests."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import AsyncGenerator
|
||||
from io import StringIO
|
||||
import json
|
||||
@@ -863,3 +864,94 @@ async def test_metadata_downloads_are_sequential(
|
||||
assert response["success"]
|
||||
# Verify downloads were sequential (max 1 at a time)
|
||||
assert max_concurrent == 1
|
||||
|
||||
|
||||
async def test_upload_timeout(
|
||||
hass_client: ClientSessionGenerator,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test upload timeout handling."""
|
||||
client = await hass_client()
|
||||
|
||||
mock_file_info = Mock()
|
||||
mock_file_info.delete = Mock()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
|
||||
return_value=TEST_BACKUP,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.backup.manager.read_backup",
|
||||
return_value=TEST_BACKUP,
|
||||
),
|
||||
patch("pathlib.Path.open") as mocked_open,
|
||||
patch(
|
||||
"homeassistant.components.backblaze_b2.backup.BackblazeBackupAgent._upload_unbound_stream_sync",
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.backblaze_b2.backup.asyncio.wait_for",
|
||||
side_effect=TimeoutError,
|
||||
),
|
||||
patch.object(
|
||||
BucketSimulator,
|
||||
"get_file_info_by_name",
|
||||
return_value=mock_file_info,
|
||||
),
|
||||
caplog.at_level(logging.ERROR),
|
||||
):
|
||||
mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
|
||||
resp = await client.post(
|
||||
f"/api/backup/upload?agent_id={DOMAIN}.{mock_config_entry.entry_id}",
|
||||
data={"file": StringIO("test")},
|
||||
)
|
||||
|
||||
assert resp.status == 201
|
||||
assert any("timed out" in msg for msg in caplog.messages)
|
||||
|
||||
|
||||
async def test_upload_cancelled(
|
||||
hass_client: ClientSessionGenerator,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test upload cancellation handling."""
|
||||
client = await hass_client()
|
||||
|
||||
mock_file_info = Mock()
|
||||
mock_file_info.delete = Mock()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
|
||||
return_value=TEST_BACKUP,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.backup.manager.read_backup",
|
||||
return_value=TEST_BACKUP,
|
||||
),
|
||||
patch("pathlib.Path.open") as mocked_open,
|
||||
patch(
|
||||
"homeassistant.components.backblaze_b2.backup.BackblazeBackupAgent._upload_unbound_stream_sync",
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.backblaze_b2.backup.asyncio.wait_for",
|
||||
side_effect=asyncio.CancelledError,
|
||||
),
|
||||
patch.object(
|
||||
BucketSimulator,
|
||||
"get_file_info_by_name",
|
||||
return_value=mock_file_info,
|
||||
),
|
||||
caplog.at_level(logging.WARNING),
|
||||
):
|
||||
mocked_open.return_value.read = Mock(side_effect=[b"test", b""])
|
||||
resp = await client.post(
|
||||
f"/api/backup/upload?agent_id={DOMAIN}.{mock_config_entry.entry_id}",
|
||||
data={"file": StringIO("test")},
|
||||
)
|
||||
|
||||
# CancelledError propagates up and causes a 500 error
|
||||
assert resp.status == 500
|
||||
assert any("cancelled" in msg for msg in caplog.messages)
|
||||
|
||||
@@ -11,6 +11,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util.yaml import UndefinedSubstitution, parse_yaml
|
||||
|
||||
from tests.common import MockUser
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
from tests.typing import WebSocketGenerator
|
||||
|
||||
@@ -103,6 +104,51 @@ async def test_list_blueprints_non_existing_domain(
|
||||
assert blueprints == {}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"message",
|
||||
[
|
||||
{"type": "blueprint/list", "domain": "automation"},
|
||||
{"type": "blueprint/import", "url": "https://example.com/blueprint.yaml"},
|
||||
{
|
||||
"type": "blueprint/save",
|
||||
"path": "test_save",
|
||||
"yaml": "raw_data",
|
||||
"domain": "automation",
|
||||
},
|
||||
{
|
||||
"type": "blueprint/delete",
|
||||
"path": "test_delete",
|
||||
"domain": "automation",
|
||||
},
|
||||
{
|
||||
"type": "blueprint/substitute",
|
||||
"domain": "automation",
|
||||
"path": "test_event_service.yaml",
|
||||
"input": {
|
||||
"trigger_event": "test_event",
|
||||
"service_to_call": "test.automation",
|
||||
"a_number": 5,
|
||||
},
|
||||
},
|
||||
],
|
||||
)
|
||||
async def test_blueprint_ws_command_requires_admin(
|
||||
hass: HomeAssistant,
|
||||
hass_ws_client: WebSocketGenerator,
|
||||
hass_admin_user: MockUser,
|
||||
message: dict[str, Any],
|
||||
) -> None:
|
||||
"""Test that blueprint websocket commands require admin."""
|
||||
hass_admin_user.groups = [] # Remove admin privileges
|
||||
client = await hass_ws_client(hass)
|
||||
await client.send_json_auto_id(message)
|
||||
|
||||
msg = await client.receive_json()
|
||||
|
||||
assert not msg["success"]
|
||||
assert msg["error"]["code"] == "unauthorized"
|
||||
|
||||
|
||||
async def test_import_blueprint(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
|
||||
@@ -13,6 +13,7 @@ import pytest
|
||||
from homeassistant.components.braviatv.const import (
|
||||
CONF_NICKNAME,
|
||||
CONF_USE_PSK,
|
||||
CONF_USE_SSL,
|
||||
DOMAIN,
|
||||
NICKNAME_PREFIX,
|
||||
)
|
||||
@@ -131,7 +132,7 @@ async def test_ssdp_discovery(hass: HomeAssistant) -> None:
|
||||
assert result["step_id"] == "authorize"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={CONF_USE_PSK: False}
|
||||
result["flow_id"], user_input={CONF_USE_PSK: False, CONF_USE_SSL: False}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
@@ -148,6 +149,7 @@ async def test_ssdp_discovery(hass: HomeAssistant) -> None:
|
||||
CONF_HOST: "bravia-host",
|
||||
CONF_PIN: "1234",
|
||||
CONF_USE_PSK: False,
|
||||
CONF_USE_SSL: False,
|
||||
CONF_MAC: "AA:BB:CC:DD:EE:FF",
|
||||
CONF_CLIENT_ID: uuid,
|
||||
CONF_NICKNAME: f"{NICKNAME_PREFIX} {uuid[:6]}",
|
||||
@@ -307,8 +309,17 @@ async def test_duplicate_error(hass: HomeAssistant) -> None:
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_create_entry(hass: HomeAssistant) -> None:
|
||||
"""Test that entry is added correctly with PIN auth."""
|
||||
@pytest.mark.parametrize(
|
||||
("use_psk", "use_ssl"),
|
||||
[
|
||||
(True, False),
|
||||
(False, False),
|
||||
(True, True),
|
||||
(False, True),
|
||||
],
|
||||
)
|
||||
async def test_create_entry(hass: HomeAssistant, use_psk, use_ssl) -> None:
|
||||
"""Test that entry is added correctly."""
|
||||
uuid = await instance_id.async_get(hass)
|
||||
|
||||
with (
|
||||
@@ -328,14 +339,14 @@ async def test_create_entry(hass: HomeAssistant) -> None:
|
||||
assert result["step_id"] == "authorize"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={CONF_USE_PSK: False}
|
||||
result["flow_id"], user_input={CONF_USE_PSK: use_psk, CONF_USE_SSL: use_ssl}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "pin"
|
||||
assert result["step_id"] == "psk" if use_psk else "pin"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={CONF_PIN: "1234"}
|
||||
result["flow_id"], user_input={CONF_PIN: "secret"}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
@@ -343,50 +354,18 @@ async def test_create_entry(hass: HomeAssistant) -> None:
|
||||
assert result["title"] == "BRAVIA TV-Model"
|
||||
assert result["data"] == {
|
||||
CONF_HOST: "bravia-host",
|
||||
CONF_PIN: "1234",
|
||||
CONF_USE_PSK: False,
|
||||
CONF_MAC: "AA:BB:CC:DD:EE:FF",
|
||||
CONF_CLIENT_ID: uuid,
|
||||
CONF_NICKNAME: f"{NICKNAME_PREFIX} {uuid[:6]}",
|
||||
}
|
||||
|
||||
|
||||
async def test_create_entry_psk(hass: HomeAssistant) -> None:
|
||||
"""Test that entry is added correctly with PSK auth."""
|
||||
with (
|
||||
patch("pybravia.BraviaClient.connect"),
|
||||
patch("pybravia.BraviaClient.set_wol_mode"),
|
||||
patch(
|
||||
"pybravia.BraviaClient.get_system_info",
|
||||
return_value=BRAVIA_SYSTEM_INFO,
|
||||
),
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}, data={CONF_HOST: "bravia-host"}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "authorize"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={CONF_USE_PSK: True}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "psk"
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"], user_input={CONF_PIN: "mypsk"}
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result["result"].unique_id == "very_unique_string"
|
||||
assert result["title"] == "BRAVIA TV-Model"
|
||||
assert result["data"] == {
|
||||
CONF_HOST: "bravia-host",
|
||||
CONF_PIN: "mypsk",
|
||||
CONF_USE_PSK: True,
|
||||
CONF_PIN: "secret",
|
||||
CONF_USE_PSK: use_psk,
|
||||
CONF_USE_SSL: use_ssl,
|
||||
CONF_MAC: "AA:BB:CC:DD:EE:FF",
|
||||
**(
|
||||
{
|
||||
CONF_CLIENT_ID: uuid,
|
||||
CONF_NICKNAME: f"{NICKNAME_PREFIX} {uuid[:6]}",
|
||||
}
|
||||
if not use_psk
|
||||
else {}
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
|
||||