forked from home-assistant/core
Merge branch 'dev' into epenet-20250527-1510
This commit is contained in:
.github/workflows/builder.yml (vendored, 4 changes)
@@ -94,7 +94,7 @@ jobs:

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v9
+        uses: dawidd6/action-download-artifact@v10
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v9
+        uses: dawidd6/action-download-artifact@v10
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/intents-package
homeassistant/components/amazon_devices/diagnostics.py (new file, 66 lines)
@@ -0,0 +1,66 @@
"""Diagnostics support for Amazon Devices integration."""

from __future__ import annotations

from typing import Any

from aioamazondevices.api import AmazonDevice

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntry

from .coordinator import AmazonConfigEntry

TO_REDACT = {CONF_PASSWORD, CONF_USERNAME, CONF_NAME, "title"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: AmazonConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator = entry.runtime_data

    devices: list[dict[str, dict[str, Any]]] = [
        build_device_data(device) for device in coordinator.data.values()
    ]

    return {
        "entry": async_redact_data(entry.as_dict(), TO_REDACT),
        "device_info": {
            "last_update success": coordinator.last_update_success,
            "last_exception": repr(coordinator.last_exception),
            "devices": devices,
        },
    }


async def async_get_device_diagnostics(
    hass: HomeAssistant, entry: AmazonConfigEntry, device_entry: DeviceEntry
) -> dict[str, Any]:
    """Return diagnostics for a device."""
    coordinator = entry.runtime_data

    assert device_entry.serial_number

    return build_device_data(coordinator.data[device_entry.serial_number])


def build_device_data(device: AmazonDevice) -> dict[str, Any]:
    """Build device data for diagnostics."""
    return {
        "account name": device.account_name,
        "capabilities": device.capabilities,
        "device family": device.device_family,
        "device type": device.device_type,
        "device cluster members": device.device_cluster_members,
        "online": device.online,
        "serial number": device.serial_number,
        "software version": device.software_version,
        "do not disturb": device.do_not_disturb,
        "response style": device.response_style,
        "bluetooth state": device.bluetooth_state,
    }
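A quick illustration of the redaction helper used above (not part of the commit; the sample values are made up): async_redact_data returns a copy of the mapping with every key in the redact set replaced by "**REDACTED**", which is what keeps credentials out of downloaded diagnostics.

from homeassistant.components.diagnostics import async_redact_data

sample_entry_data = {
    "username": "user@example.com",  # made-up values
    "password": "hunter2",
    "title": "My Echo",
    "country": "US",
}
# Keys listed in the redact set come back as "**REDACTED**"; the rest pass through.
print(async_redact_data(sample_entry_data, {"username", "password", "title"}))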
homeassistant/components/amazon_devices/manifest.json
@@ -118,5 +118,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "bronze",
-  "requirements": ["aioamazondevices==2.1.1"]
+  "requirements": ["aioamazondevices==3.0.4"]
 }
homeassistant/components/analytics/analytics.py
@@ -24,7 +24,7 @@ from homeassistant.components.recorder import (
     get_instance as get_recorder_instance,
 )
 from homeassistant.config_entries import SOURCE_IGNORE
-from homeassistant.const import ATTR_DOMAIN, __version__ as HA_VERSION
+from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import entity_registry as er
@@ -225,7 +225,8 @@ class Analytics:
             LOGGER.error(err)
             return

-        configuration_set = set(yaml_configuration)
+        configuration_set = _domains_from_yaml_config(yaml_configuration)
+
         er_platforms = {
             entity.platform
             for entity in ent_reg.entities.values()
@@ -370,3 +371,13 @@ class Analytics:
         for entry in entries
         if entry.source != SOURCE_IGNORE and entry.disabled_by is None
     )
+
+
+def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
+    """Extract domains from the YAML configuration."""
+    domains = set(yaml_configuration)
+    for platforms in conf_util.extract_platform_integrations(
+        yaml_configuration, BASE_PLATFORMS
+    ).values():
+        domains.update(platforms)
+    return domains
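Illustrative sketch of what the new helper changes (not part of the commit; the configuration is made up): set(yaml_configuration) only yields the top-level domains, while _domains_from_yaml_config also folds in integrations referenced through platform: entries under base platforms, so analytics now counts them too.

yaml_configuration = {
    "automation": [],
    "sensor": [{"platform": "hddtemp"}, {"platform": "random"}],
}

top_level = set(yaml_configuration)  # {"automation", "sensor"}
# Rough stand-in for conf_util.extract_platform_integrations(), for illustration only:
platform_refs = {
    entry["platform"]
    for entries in yaml_configuration.values()
    if isinstance(entries, list)
    for entry in entries
    if isinstance(entry, dict) and "platform" in entry
}
print(top_level | platform_refs)  # {"automation", "sensor", "hddtemp", "random"}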
homeassistant/components/camera/manifest.json
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/camera",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["PyTurboJPEG==1.7.5"]
+  "requirements": ["PyTurboJPEG==1.8.0"]
 }

homeassistant/components/compensation/manifest.json
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/compensation",
   "iot_class": "calculated",
   "quality_scale": "legacy",
-  "requirements": ["numpy==2.2.2"]
+  "requirements": ["numpy==2.2.6"]
 }

homeassistant/components/google/manifest.json
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/google",
   "iot_class": "cloud_polling",
   "loggers": ["googleapiclient"],
-  "requirements": ["gcal-sync==7.1.0", "oauth2client==4.1.3", "ical==9.2.5"]
+  "requirements": ["gcal-sync==7.1.0", "oauth2client==4.1.3", "ical==10.0.0"]
 }
homeassistant/components/hddtemp/__init__.py
@@ -1 +1,3 @@
 """The hddtemp component."""
+
+DOMAIN = "hddtemp"
homeassistant/components/hddtemp/sensor.py
@@ -22,11 +22,14 @@ from homeassistant.const import (
     CONF_PORT,
     UnitOfTemperature,
 )
-from homeassistant.core import HomeAssistant
+from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

+from . import DOMAIN
+
 _LOGGER = logging.getLogger(__name__)

 ATTR_DEVICE = "device"
@@ -56,6 +59,21 @@ def setup_platform(
     discovery_info: DiscoveryInfoType | None = None,
 ) -> None:
     """Set up the HDDTemp sensor."""
+    create_issue(
+        hass,
+        HOMEASSISTANT_DOMAIN,
+        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
+        breaks_in_ha_version="2025.12.0",
+        is_fixable=False,
+        issue_domain=DOMAIN,
+        severity=IssueSeverity.WARNING,
+        translation_key="deprecated_system_packages_yaml_integration",
+        translation_placeholders={
+            "domain": DOMAIN,
+            "integration_title": "hddtemp",
+        },
+    )
+
     name = config.get(CONF_NAME)
     host = config.get(CONF_HOST)
     port = config.get(CONF_PORT)
homeassistant/components/http/cors.py
@@ -39,14 +39,14 @@ def setup_cors(app: Application, origins: list[str]) -> None:
     cors = aiohttp_cors.setup(
         app,
         defaults={
-            host: aiohttp_cors.ResourceOptions(
+            host: aiohttp_cors.ResourceOptions(  # type: ignore[no-untyped-call]
                 allow_headers=ALLOWED_CORS_HEADERS, allow_methods="*"
             )
             for host in origins
         },
     )

-    cors_added = set()
+    cors_added: set[str] = set()

     def _allow_cors(
         route: AbstractRoute | AbstractResource,
@@ -69,13 +69,13 @@ def setup_cors(app: Application, origins: list[str]) -> None:
         if path_str in cors_added:
             return
-        cors.add(route, config)
+        cors.add(route, config)  # type: ignore[arg-type]
         cors_added.add(path_str)

     app[KEY_ALLOW_ALL_CORS] = lambda route: _allow_cors(
         route,
         {
-            "*": aiohttp_cors.ResourceOptions(
+            "*": aiohttp_cors.ResourceOptions(  # type: ignore[no-untyped-call]
                 allow_headers=ALLOWED_CORS_HEADERS, allow_methods="*"
             )
         },
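Minimal sketch of the aiohttp_cors pattern this module wraps (not part of the commit; the origin, path, and handler are examples, not taken from this file): one ResourceOptions default per allowed origin, then each route is registered with cors.add() so it answers preflight requests.

import aiohttp_cors
from aiohttp import web

async def ping(request: web.Request) -> web.Response:
    return web.json_response({"ok": True})

app = web.Application()
cors = aiohttp_cors.setup(
    app,
    defaults={
        "https://cast.home-assistant.io": aiohttp_cors.ResourceOptions(
            allow_headers="*", allow_methods="*"
        )
    },
)
# Registering the route with the CORS setup mirrors what _allow_cors() does above.
cors.add(app.router.add_get("/api/ping", ping))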
homeassistant/components/immich/manifest.json
@@ -8,5 +8,5 @@
   "iot_class": "local_polling",
   "loggers": ["aioimmich"],
   "quality_scale": "silver",
-  "requirements": ["aioimmich==0.7.0"]
+  "requirements": ["aioimmich==0.8.0"]
 }
homeassistant/components/immich/media_source.py
@@ -133,10 +133,10 @@ class ImmichMediaSource(MediaSource):
                 identifier=f"{identifier.unique_id}|albums|{album.album_id}",
                 media_class=MediaClass.DIRECTORY,
                 media_content_type=MediaClass.IMAGE,
-                title=album.name,
+                title=album.album_name,
                 can_play=False,
                 can_expand=True,
-                thumbnail=f"/immich/{identifier.unique_id}/{album.thumbnail_asset_id}/thumbnail/image/jpg",
+                thumbnail=f"/immich/{identifier.unique_id}/{album.album_thumbnail_asset_id}/thumbnail/image/jpg",
             )
             for album in albums
         ]
@@ -160,18 +160,19 @@ class ImmichMediaSource(MediaSource):
                     f"{identifier.unique_id}|albums|"
                     f"{identifier.collection_id}|"
                     f"{asset.asset_id}|"
-                    f"{asset.file_name}|"
-                    f"{asset.mime_type}"
+                    f"{asset.original_file_name}|"
+                    f"{mime_type}"
                 ),
                 media_class=MediaClass.IMAGE,
-                media_content_type=asset.mime_type,
-                title=asset.file_name,
+                media_content_type=mime_type,
+                title=asset.original_file_name,
                 can_play=False,
                 can_expand=False,
-                thumbnail=f"/immich/{identifier.unique_id}/{asset.asset_id}/thumbnail/{asset.mime_type}",
+                thumbnail=f"/immich/{identifier.unique_id}/{asset.asset_id}/thumbnail/{mime_type}",
             )
             for asset in album_info.assets
-            if asset.mime_type.startswith("image/")
+            if (mime_type := asset.original_mime_type)
+            and mime_type.startswith("image/")
         ]

         ret.extend(
@@ -181,18 +182,19 @@ class ImmichMediaSource(MediaSource):
                     f"{identifier.unique_id}|albums|"
                     f"{identifier.collection_id}|"
                     f"{asset.asset_id}|"
-                    f"{asset.file_name}|"
-                    f"{asset.mime_type}"
+                    f"{asset.original_file_name}|"
+                    f"{mime_type}"
                 ),
                 media_class=MediaClass.VIDEO,
-                media_content_type=asset.mime_type,
-                title=asset.file_name,
+                media_content_type=mime_type,
+                title=asset.original_file_name,
                 can_play=True,
                 can_expand=False,
                 thumbnail=f"/immich/{identifier.unique_id}/{asset.asset_id}/thumbnail/image/jpeg",
             )
             for asset in album_info.assets
-            if asset.mime_type.startswith("video/")
+            if (mime_type := asset.original_mime_type)
+            and mime_type.startswith("video/")
         )

         return ret
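Small self-contained example of the walrus-operator filter introduced above (not part of the commit; FakeAsset is a simplified stand-in for the aioimmich asset model): assets whose original_mime_type is missing are skipped instead of raising, and the bound mime_type is reused in the comprehension body.

from dataclasses import dataclass

@dataclass
class FakeAsset:  # stand-in for aioimmich's asset object
    asset_id: str
    original_file_name: str
    original_mime_type: str | None

assets = [
    FakeAsset("a1", "cat.jpg", "image/jpeg"),
    FakeAsset("a2", "clip.mp4", "video/mp4"),
    FakeAsset("a3", "broken", None),
]
images = [
    (a.original_file_name, mime_type)
    for a in assets
    if (mime_type := a.original_mime_type) and mime_type.startswith("image/")
]
print(images)  # [('cat.jpg', 'image/jpeg')]; the None mime type is skipped safely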
homeassistant/components/iqvia/manifest.json
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["pyiqvia"],
-  "requirements": ["numpy==2.2.2", "pyiqvia==2022.04.0"]
+  "requirements": ["numpy==2.2.6", "pyiqvia==2022.04.0"]
 }

homeassistant/components/local_calendar/manifest.json
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/local_calendar",
   "iot_class": "local_polling",
   "loggers": ["ical"],
-  "requirements": ["ical==9.2.5"]
+  "requirements": ["ical==10.0.0"]
 }

homeassistant/components/local_todo/manifest.json
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/local_todo",
   "iot_class": "local_polling",
-  "requirements": ["ical==9.2.5"]
+  "requirements": ["ical==10.0.0"]
 }
homeassistant/components/matter/light.py
@@ -162,7 +162,7 @@ class MatterLight(MatterEntity, LightEntity):

         assert level_control is not None

-        level = round(  # type: ignore[unreachable]
+        level = round(
             renormalize(
                 brightness,
                 (0, 255),
@@ -249,7 +249,7 @@ class MatterLight(MatterEntity, LightEntity):
         # We should not get here if brightness is not supported.
         assert level_control is not None

-        LOGGER.debug(  # type: ignore[unreachable]
+        LOGGER.debug(
             "Got brightness %s for %s",
             level_control.currentLevel,
             self.entity_id,
homeassistant/components/radarr/entity.py
@@ -2,8 +2,6 @@

 from __future__ import annotations

-from typing import cast
-
 from homeassistant.const import ATTR_SW_VERSION
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.entity import EntityDescription
@@ -40,7 +38,5 @@ class RadarrEntity(CoordinatorEntity[RadarrDataUpdateCoordinator[T]]):
             name=self.coordinator.config_entry.title,
         )
         if isinstance(self.coordinator, StatusDataUpdateCoordinator):
-            device_info[ATTR_SW_VERSION] = cast(
-                StatusDataUpdateCoordinator, self.coordinator
-            ).data.version
+            device_info[ATTR_SW_VERSION] = self.coordinator.data.version
         return device_info
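Why the cast() could go (illustration, not part of the commit; the classes below are simplified stand-ins for the Radarr coordinators): isinstance() already narrows the variable's type for mypy inside the branch, so the explicit cast was redundant.

class DataUpdateCoordinator:
    pass

class StatusDataUpdateCoordinator(DataUpdateCoordinator):
    version = "5.26.2"  # made-up value

def sw_version(coordinator: DataUpdateCoordinator) -> str | None:
    if isinstance(coordinator, StatusDataUpdateCoordinator):
        # Inside this branch mypy treats `coordinator` as the subclass,
        # so cast(StatusDataUpdateCoordinator, coordinator) adds nothing.
        return coordinator.version
    return None

print(sw_version(StatusDataUpdateCoordinator()))  # 5.26.2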
homeassistant/components/remote_calendar/manifest.json
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["ical"],
   "quality_scale": "silver",
-  "requirements": ["ical==9.2.5"]
+  "requirements": ["ical==10.0.0"]
 }

homeassistant/components/stream/manifest.json
@@ -7,5 +7,5 @@
   "integration_type": "system",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.2.2"]
+  "requirements": ["PyTurboJPEG==1.8.0", "av==13.1.0", "numpy==2.2.6"]
 }
homeassistant/components/sun/__init__.py
@@ -16,7 +16,10 @@ from homeassistant.helpers.typing import ConfigType
 # as we will always load it and we do not want to have
 # to wait for the import executor when its busy later
 # in the startup process.
-from . import sensor as sensor_pre_import  # noqa: F401
+from . import (
+    binary_sensor as binary_sensor_pre_import,  # noqa: F401
+    sensor as sensor_pre_import,  # noqa: F401
+)
 from .const import (  # noqa: F401
     DOMAIN,
     STATE_ABOVE_HORIZON,
@@ -24,6 +27,8 @@ from .const import (  # noqa: F401
 )
 from .entity import Sun, SunConfigEntry

+PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
+
 CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

 _LOGGER = logging.getLogger(__name__)
@@ -52,14 +57,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: SunConfigEntry) -> bool:
     await component.async_add_entities([sun])
     entry.runtime_data = sun
     entry.async_on_unload(sun.remove_listeners)
-    await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR])
+    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
     return True


 async def async_unload_entry(hass: HomeAssistant, entry: SunConfigEntry) -> bool:
     """Unload a config entry."""
-    if unload_ok := await hass.config_entries.async_unload_platforms(
-        entry, [Platform.SENSOR]
-    ):
+    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
         await entry.runtime_data.async_remove()
     return unload_ok
homeassistant/components/sun/binary_sensor.py (new file, 100 lines)
@@ -0,0 +1,100 @@
"""Binary Sensor platform for Sun integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from homeassistant.components.binary_sensor import (
    DOMAIN as BINARY_SENSOR_DOMAIN,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN, SIGNAL_EVENTS_CHANGED
from .entity import Sun, SunConfigEntry

ENTITY_ID_BINARY_SENSOR_FORMAT = BINARY_SENSOR_DOMAIN + ".sun_{}"


@dataclass(kw_only=True, frozen=True)
class SunBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Describes a Sun binary sensor entity."""

    value_fn: Callable[[Sun], bool | None]
    signal: str


BINARY_SENSOR_TYPES: tuple[SunBinarySensorEntityDescription, ...] = (
    SunBinarySensorEntityDescription(
        key="solar_rising",
        translation_key="solar_rising",
        value_fn=lambda data: data.rising,
        entity_registry_enabled_default=False,
        signal=SIGNAL_EVENTS_CHANGED,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: SunConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Sun binary sensor platform."""
    sun = entry.runtime_data

    async_add_entities(
        [
            SunBinarySensor(sun, description, entry.entry_id)
            for description in BINARY_SENSOR_TYPES
        ]
    )


class SunBinarySensor(BinarySensorEntity):
    """Representation of a Sun binary sensor."""

    _attr_has_entity_name = True
    _attr_should_poll = False
    _attr_entity_category = EntityCategory.DIAGNOSTIC
    entity_description: SunBinarySensorEntityDescription

    def __init__(
        self,
        sun: Sun,
        entity_description: SunBinarySensorEntityDescription,
        entry_id: str,
    ) -> None:
        """Initiate Sun Binary Sensor."""
        self.entity_description = entity_description
        self.entity_id = ENTITY_ID_BINARY_SENSOR_FORMAT.format(entity_description.key)
        self._attr_unique_id = f"{entry_id}-{entity_description.key}"
        self.sun = sun
        self._attr_device_info = DeviceInfo(
            name="Sun",
            identifiers={(DOMAIN, entry_id)},
            entry_type=DeviceEntryType.SERVICE,
        )

    @property
    def is_on(self) -> bool | None:
        """Return value of binary sensor."""
        return self.entity_description.value_fn(self.sun)

    async def async_added_to_hass(self) -> None:
        """Register signal listener when added to hass."""
        await super().async_added_to_hass()
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                self.entity_description.signal,
                self.async_write_ha_state,
            )
        )
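Sketch of how the dispatcher wiring above gets driven (not part of the commit; the signal value and the calling code are assumptions for illustration): whatever code recomputes the sun events sends the signal, and every connected entity rewrites its state.

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_send

SIGNAL_EVENTS_CHANGED = "sun_events_changed"  # assumed constant from .const

@callback
def _notify_sun_listeners(hass: HomeAssistant) -> None:
    # Every entity connected via async_dispatcher_connect(SIGNAL_EVENTS_CHANGED, ...)
    # re-evaluates value_fn(sun) and writes its new state.
    async_dispatcher_send(hass, SIGNAL_EVENTS_CHANGED)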
homeassistant/components/sun/icons.json
@@ -28,6 +28,15 @@
       "solar_rising": {
         "default": "mdi:sun-clock"
       }
     },
+    "binary_sensor": {
+      "solar_rising": {
+        "default": "mdi:weather-sunny-off",
+        "state": {
+          "on": "mdi:weather-sunset-up",
+          "off": "mdi:weather-sunset-down"
+        }
+      }
+    }
   }
 }
homeassistant/components/sun/strings.json
@@ -27,6 +27,15 @@
       "solar_azimuth": { "name": "Solar azimuth" },
       "solar_elevation": { "name": "Solar elevation" },
       "solar_rising": { "name": "Solar rising" }
     },
+    "binary_sensor": {
+      "solar_rising": {
+        "name": "Solar rising",
+        "state": {
+          "on": "Rising",
+          "off": "Setting"
+        }
+      }
+    }
   }
 }
(File diff suppressed because it is too large.)

homeassistant/components/telegram_bot/bot.py (new file, 924 lines)
@@ -0,0 +1,924 @@
|
||||
"""Telegram bot classes and utilities."""
|
||||
|
||||
from abc import abstractmethod
|
||||
import asyncio
|
||||
import io
|
||||
import logging
|
||||
from types import MappingProxyType
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
from telegram import (
|
||||
Bot,
|
||||
CallbackQuery,
|
||||
InlineKeyboardButton,
|
||||
InlineKeyboardMarkup,
|
||||
Message,
|
||||
ReplyKeyboardMarkup,
|
||||
ReplyKeyboardRemove,
|
||||
Update,
|
||||
User,
|
||||
)
|
||||
from telegram.constants import ParseMode
|
||||
from telegram.error import TelegramError
|
||||
from telegram.ext import CallbackContext, filters
|
||||
from telegram.request import HTTPXRequest
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_COMMAND,
|
||||
CONF_API_KEY,
|
||||
HTTP_BEARER_AUTHENTICATION,
|
||||
HTTP_DIGEST_AUTHENTICATION,
|
||||
)
|
||||
from homeassistant.core import Context, HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.util.ssl import get_default_context, get_default_no_verify_context
|
||||
|
||||
from .const import (
|
||||
ATTR_ARGS,
|
||||
ATTR_AUTHENTICATION,
|
||||
ATTR_CAPTION,
|
||||
ATTR_CHAT_ID,
|
||||
ATTR_CHAT_INSTANCE,
|
||||
ATTR_DATA,
|
||||
ATTR_DATE,
|
||||
ATTR_DISABLE_NOTIF,
|
||||
ATTR_DISABLE_WEB_PREV,
|
||||
ATTR_FILE,
|
||||
ATTR_FROM_FIRST,
|
||||
ATTR_FROM_LAST,
|
||||
ATTR_KEYBOARD,
|
||||
ATTR_KEYBOARD_INLINE,
|
||||
ATTR_MESSAGE,
|
||||
ATTR_MESSAGE_TAG,
|
||||
ATTR_MESSAGE_THREAD_ID,
|
||||
ATTR_MESSAGEID,
|
||||
ATTR_MSG,
|
||||
ATTR_MSGID,
|
||||
ATTR_ONE_TIME_KEYBOARD,
|
||||
ATTR_OPEN_PERIOD,
|
||||
ATTR_PARSER,
|
||||
ATTR_PASSWORD,
|
||||
ATTR_REPLY_TO_MSGID,
|
||||
ATTR_REPLYMARKUP,
|
||||
ATTR_RESIZE_KEYBOARD,
|
||||
ATTR_STICKER_ID,
|
||||
ATTR_TEXT,
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TITLE,
|
||||
ATTR_URL,
|
||||
ATTR_USER_ID,
|
||||
ATTR_USERNAME,
|
||||
ATTR_VERIFY_SSL,
|
||||
CONF_CHAT_ID,
|
||||
CONF_PROXY_PARAMS,
|
||||
CONF_PROXY_URL,
|
||||
DOMAIN,
|
||||
EVENT_TELEGRAM_CALLBACK,
|
||||
EVENT_TELEGRAM_COMMAND,
|
||||
EVENT_TELEGRAM_SENT,
|
||||
EVENT_TELEGRAM_TEXT,
|
||||
PARSER_HTML,
|
||||
PARSER_MD,
|
||||
PARSER_MD2,
|
||||
PARSER_PLAIN_TEXT,
|
||||
SERVICE_EDIT_CAPTION,
|
||||
SERVICE_EDIT_MESSAGE,
|
||||
SERVICE_SEND_ANIMATION,
|
||||
SERVICE_SEND_DOCUMENT,
|
||||
SERVICE_SEND_PHOTO,
|
||||
SERVICE_SEND_STICKER,
|
||||
SERVICE_SEND_VIDEO,
|
||||
SERVICE_SEND_VOICE,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type TelegramBotConfigEntry = ConfigEntry[TelegramNotificationService]
|
||||
|
||||
|
||||
class BaseTelegramBot:
|
||||
"""The base class for the telegram bot."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TelegramBotConfigEntry) -> None:
|
||||
"""Initialize the bot base class."""
|
||||
self.hass = hass
|
||||
self.config = config
|
||||
|
||||
@abstractmethod
|
||||
async def shutdown(self) -> None:
|
||||
"""Shutdown the bot application."""
|
||||
|
||||
async def handle_update(self, update: Update, context: CallbackContext) -> bool:
|
||||
"""Handle updates from bot application set up by the respective platform."""
|
||||
_LOGGER.debug("Handling update %s", update)
|
||||
if not self.authorize_update(update):
|
||||
return False
|
||||
|
||||
# establish event type: text, command or callback_query
|
||||
if update.callback_query:
|
||||
# NOTE: Check for callback query first since effective message will be populated with the message
|
||||
# in .callback_query (python-telegram-bot docs are wrong)
|
||||
event_type, event_data = self._get_callback_query_event_data(
|
||||
update.callback_query
|
||||
)
|
||||
elif update.effective_message:
|
||||
event_type, event_data = self._get_message_event_data(
|
||||
update.effective_message
|
||||
)
|
||||
else:
|
||||
_LOGGER.warning("Unhandled update: %s", update)
|
||||
return True
|
||||
|
||||
event_context = Context()
|
||||
|
||||
_LOGGER.debug("Firing event %s: %s", event_type, event_data)
|
||||
self.hass.bus.async_fire(event_type, event_data, context=event_context)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _get_command_event_data(command_text: str | None) -> dict[str, str | list]:
|
||||
if not command_text or not command_text.startswith("/"):
|
||||
return {}
|
||||
command_parts = command_text.split()
|
||||
command = command_parts[0]
|
||||
args = command_parts[1:]
|
||||
return {ATTR_COMMAND: command, ATTR_ARGS: args}
|
||||
|
||||
def _get_message_event_data(self, message: Message) -> tuple[str, dict[str, Any]]:
|
||||
event_data: dict[str, Any] = {
|
||||
ATTR_MSGID: message.message_id,
|
||||
ATTR_CHAT_ID: message.chat.id,
|
||||
ATTR_DATE: message.date,
|
||||
ATTR_MESSAGE_THREAD_ID: message.message_thread_id,
|
||||
}
|
||||
if filters.COMMAND.filter(message):
|
||||
# This is a command message - set event type to command and split data into command and args
|
||||
event_type = EVENT_TELEGRAM_COMMAND
|
||||
event_data.update(self._get_command_event_data(message.text))
|
||||
else:
|
||||
event_type = EVENT_TELEGRAM_TEXT
|
||||
event_data[ATTR_TEXT] = message.text
|
||||
|
||||
if message.from_user:
|
||||
event_data.update(self._get_user_event_data(message.from_user))
|
||||
|
||||
return event_type, event_data
|
||||
|
||||
def _get_user_event_data(self, user: User) -> dict[str, Any]:
|
||||
return {
|
||||
ATTR_USER_ID: user.id,
|
||||
ATTR_FROM_FIRST: user.first_name,
|
||||
ATTR_FROM_LAST: user.last_name,
|
||||
}
|
||||
|
||||
def _get_callback_query_event_data(
|
||||
self, callback_query: CallbackQuery
|
||||
) -> tuple[str, dict[str, Any]]:
|
||||
event_type = EVENT_TELEGRAM_CALLBACK
|
||||
event_data: dict[str, Any] = {
|
||||
ATTR_MSGID: callback_query.id,
|
||||
ATTR_CHAT_INSTANCE: callback_query.chat_instance,
|
||||
ATTR_DATA: callback_query.data,
|
||||
ATTR_MSG: None,
|
||||
ATTR_CHAT_ID: None,
|
||||
}
|
||||
if callback_query.message:
|
||||
event_data[ATTR_MSG] = callback_query.message.to_dict()
|
||||
event_data[ATTR_CHAT_ID] = callback_query.message.chat.id
|
||||
|
||||
if callback_query.from_user:
|
||||
event_data.update(self._get_user_event_data(callback_query.from_user))
|
||||
|
||||
# Split data into command and args if possible
|
||||
event_data.update(self._get_command_event_data(callback_query.data))
|
||||
|
||||
return event_type, event_data
|
||||
|
||||
def authorize_update(self, update: Update) -> bool:
|
||||
"""Make sure either user or chat is in allowed_chat_ids."""
|
||||
from_user = update.effective_user.id if update.effective_user else None
|
||||
from_chat = update.effective_chat.id if update.effective_chat else None
|
||||
allowed_chat_ids: list[int] = [
|
||||
subentry.data[CONF_CHAT_ID] for subentry in self.config.subentries.values()
|
||||
]
|
||||
if from_user in allowed_chat_ids or from_chat in allowed_chat_ids:
|
||||
return True
|
||||
_LOGGER.error(
|
||||
(
|
||||
"Unauthorized update - neither user id %s nor chat id %s is in allowed"
|
||||
" chats: %s"
|
||||
),
|
||||
from_user,
|
||||
from_chat,
|
||||
allowed_chat_ids,
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
class TelegramNotificationService:
|
||||
"""Implement the notification services for the Telegram Bot domain."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
app: BaseTelegramBot,
|
||||
bot: Bot,
|
||||
config: TelegramBotConfigEntry,
|
||||
parser: str,
|
||||
) -> None:
|
||||
"""Initialize the service."""
|
||||
self.app = app
|
||||
self.config = config
|
||||
self._parsers = {
|
||||
PARSER_HTML: ParseMode.HTML,
|
||||
PARSER_MD: ParseMode.MARKDOWN,
|
||||
PARSER_MD2: ParseMode.MARKDOWN_V2,
|
||||
PARSER_PLAIN_TEXT: None,
|
||||
}
|
||||
self._parse_mode = self._parsers.get(parser)
|
||||
self.bot = bot
|
||||
self.hass = hass
|
||||
|
||||
def _get_allowed_chat_ids(self) -> list[int]:
|
||||
allowed_chat_ids: list[int] = [
|
||||
subentry.data[CONF_CHAT_ID] for subentry in self.config.subentries.values()
|
||||
]
|
||||
|
||||
if not allowed_chat_ids:
|
||||
bot_name: str = self.config.title
|
||||
raise ServiceValidationError(
|
||||
"No allowed chat IDs found for bot",
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_allowed_chat_ids",
|
||||
translation_placeholders={
|
||||
"bot_name": bot_name,
|
||||
},
|
||||
)
|
||||
|
||||
return allowed_chat_ids
|
||||
|
||||
def _get_last_message_id(self):
|
||||
return dict.fromkeys(self._get_allowed_chat_ids())
|
||||
|
||||
def _get_msg_ids(self, msg_data, chat_id):
|
||||
"""Get the message id to edit.
|
||||
|
||||
This can be one of (message_id, inline_message_id) from a msg dict,
|
||||
returning a tuple.
|
||||
**You can use 'last' as message_id** to edit
|
||||
the message last sent in the chat_id.
|
||||
"""
|
||||
message_id = inline_message_id = None
|
||||
if ATTR_MESSAGEID in msg_data:
|
||||
message_id = msg_data[ATTR_MESSAGEID]
|
||||
if (
|
||||
isinstance(message_id, str)
|
||||
and (message_id == "last")
|
||||
and (self._get_last_message_id()[chat_id] is not None)
|
||||
):
|
||||
message_id = self._get_last_message_id()[chat_id]
|
||||
else:
|
||||
inline_message_id = msg_data["inline_message_id"]
|
||||
return message_id, inline_message_id
|
||||
|
||||
def _get_target_chat_ids(self, target):
|
||||
"""Validate chat_id targets or return default target (first).
|
||||
|
||||
:param target: optional list of integers ([12234, -12345])
|
||||
:return list of chat_id targets (integers)
|
||||
"""
|
||||
allowed_chat_ids: list[int] = self._get_allowed_chat_ids()
|
||||
default_user: int = allowed_chat_ids[0]
|
||||
if target is not None:
|
||||
if isinstance(target, int):
|
||||
target = [target]
|
||||
chat_ids = [t for t in target if t in allowed_chat_ids]
|
||||
if chat_ids:
|
||||
return chat_ids
|
||||
_LOGGER.warning(
|
||||
"Disallowed targets: %s, using default: %s", target, default_user
|
||||
)
|
||||
return [default_user]
|
||||
|
||||
def _get_msg_kwargs(self, data):
|
||||
"""Get parameters in message data kwargs."""
|
||||
|
||||
def _make_row_inline_keyboard(row_keyboard):
|
||||
"""Make a list of InlineKeyboardButtons.
|
||||
|
||||
It can accept:
|
||||
- a list of tuples like:
|
||||
`[(text_b1, data_callback_b1),
|
||||
(text_b2, data_callback_b2), ...]
|
||||
- a string like: `/cmd1, /cmd2, /cmd3`
|
||||
- or a string like: `text_b1:/cmd1, text_b2:/cmd2`
|
||||
- also supports urls instead of callback commands
|
||||
"""
|
||||
buttons = []
|
||||
if isinstance(row_keyboard, str):
|
||||
for key in row_keyboard.split(","):
|
||||
if ":/" in key:
|
||||
# check if command or URL
|
||||
if key.startswith("https://"):
|
||||
label = key.split(",")[0]
|
||||
url = key[len(label) + 1 :]
|
||||
buttons.append(InlineKeyboardButton(label, url=url))
|
||||
else:
|
||||
# commands like: 'Label:/cmd' become ('Label', '/cmd')
|
||||
label = key.split(":/")[0]
|
||||
command = key[len(label) + 1 :]
|
||||
buttons.append(
|
||||
InlineKeyboardButton(label, callback_data=command)
|
||||
)
|
||||
else:
|
||||
# commands like: '/cmd' become ('CMD', '/cmd')
|
||||
label = key.strip()[1:].upper()
|
||||
buttons.append(InlineKeyboardButton(label, callback_data=key))
|
||||
elif isinstance(row_keyboard, list):
|
||||
for entry in row_keyboard:
|
||||
text_btn, data_btn = entry
|
||||
if data_btn.startswith("https://"):
|
||||
buttons.append(InlineKeyboardButton(text_btn, url=data_btn))
|
||||
else:
|
||||
buttons.append(
|
||||
InlineKeyboardButton(text_btn, callback_data=data_btn)
|
||||
)
|
||||
else:
|
||||
raise TypeError(str(row_keyboard))
|
||||
return buttons
|
||||
|
||||
# Defaults
|
||||
params = {
|
||||
ATTR_PARSER: self._parse_mode,
|
||||
ATTR_DISABLE_NOTIF: False,
|
||||
ATTR_DISABLE_WEB_PREV: None,
|
||||
ATTR_REPLY_TO_MSGID: None,
|
||||
ATTR_REPLYMARKUP: None,
|
||||
ATTR_TIMEOUT: None,
|
||||
ATTR_MESSAGE_TAG: None,
|
||||
ATTR_MESSAGE_THREAD_ID: None,
|
||||
}
|
||||
if data is not None:
|
||||
if ATTR_PARSER in data:
|
||||
params[ATTR_PARSER] = self._parsers.get(
|
||||
data[ATTR_PARSER], self._parse_mode
|
||||
)
|
||||
if ATTR_TIMEOUT in data:
|
||||
params[ATTR_TIMEOUT] = data[ATTR_TIMEOUT]
|
||||
if ATTR_DISABLE_NOTIF in data:
|
||||
params[ATTR_DISABLE_NOTIF] = data[ATTR_DISABLE_NOTIF]
|
||||
if ATTR_DISABLE_WEB_PREV in data:
|
||||
params[ATTR_DISABLE_WEB_PREV] = data[ATTR_DISABLE_WEB_PREV]
|
||||
if ATTR_REPLY_TO_MSGID in data:
|
||||
params[ATTR_REPLY_TO_MSGID] = data[ATTR_REPLY_TO_MSGID]
|
||||
if ATTR_MESSAGE_TAG in data:
|
||||
params[ATTR_MESSAGE_TAG] = data[ATTR_MESSAGE_TAG]
|
||||
if ATTR_MESSAGE_THREAD_ID in data:
|
||||
params[ATTR_MESSAGE_THREAD_ID] = data[ATTR_MESSAGE_THREAD_ID]
|
||||
# Keyboards:
|
||||
if ATTR_KEYBOARD in data:
|
||||
keys = data.get(ATTR_KEYBOARD)
|
||||
keys = keys if isinstance(keys, list) else [keys]
|
||||
if keys:
|
||||
params[ATTR_REPLYMARKUP] = ReplyKeyboardMarkup(
|
||||
[[key.strip() for key in row.split(",")] for row in keys],
|
||||
resize_keyboard=data.get(ATTR_RESIZE_KEYBOARD, False),
|
||||
one_time_keyboard=data.get(ATTR_ONE_TIME_KEYBOARD, False),
|
||||
)
|
||||
else:
|
||||
params[ATTR_REPLYMARKUP] = ReplyKeyboardRemove(True)
|
||||
|
||||
elif ATTR_KEYBOARD_INLINE in data:
|
||||
keys = data.get(ATTR_KEYBOARD_INLINE)
|
||||
keys = keys if isinstance(keys, list) else [keys]
|
||||
params[ATTR_REPLYMARKUP] = InlineKeyboardMarkup(
|
||||
[_make_row_inline_keyboard(row) for row in keys]
|
||||
)
|
||||
return params
|
||||
|
||||
async def _send_msg(
|
||||
self, func_send, msg_error, message_tag, *args_msg, context=None, **kwargs_msg
|
||||
):
|
||||
"""Send one message."""
|
||||
try:
|
||||
out = await func_send(*args_msg, **kwargs_msg)
|
||||
if not isinstance(out, bool) and hasattr(out, ATTR_MESSAGEID):
|
||||
chat_id = out.chat_id
|
||||
message_id = out[ATTR_MESSAGEID]
|
||||
self._get_last_message_id()[chat_id] = message_id
|
||||
_LOGGER.debug(
|
||||
"Last message ID: %s (from chat_id %s)",
|
||||
self._get_last_message_id(),
|
||||
chat_id,
|
||||
)
|
||||
|
||||
event_data = {
|
||||
ATTR_CHAT_ID: chat_id,
|
||||
ATTR_MESSAGEID: message_id,
|
||||
}
|
||||
if message_tag is not None:
|
||||
event_data[ATTR_MESSAGE_TAG] = message_tag
|
||||
if kwargs_msg.get(ATTR_MESSAGE_THREAD_ID) is not None:
|
||||
event_data[ATTR_MESSAGE_THREAD_ID] = kwargs_msg[
|
||||
ATTR_MESSAGE_THREAD_ID
|
||||
]
|
||||
self.hass.bus.async_fire(
|
||||
EVENT_TELEGRAM_SENT, event_data, context=context
|
||||
)
|
||||
elif not isinstance(out, bool):
|
||||
_LOGGER.warning(
|
||||
"Update last message: out_type:%s, out=%s", type(out), out
|
||||
)
|
||||
except TelegramError as exc:
|
||||
_LOGGER.error(
|
||||
"%s: %s. Args: %s, kwargs: %s", msg_error, exc, args_msg, kwargs_msg
|
||||
)
|
||||
return None
|
||||
return out
|
||||
|
||||
async def send_message(self, message="", target=None, context=None, **kwargs):
|
||||
"""Send a message to one or multiple pre-allowed chat IDs."""
|
||||
title = kwargs.get(ATTR_TITLE)
|
||||
text = f"{title}\n{message}" if title else message
|
||||
params = self._get_msg_kwargs(kwargs)
|
||||
msg_ids = {}
|
||||
for chat_id in self._get_target_chat_ids(target):
|
||||
_LOGGER.debug("Send message in chat ID %s with params: %s", chat_id, params)
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_message,
|
||||
"Error sending message",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id,
|
||||
text,
|
||||
parse_mode=params[ATTR_PARSER],
|
||||
disable_web_page_preview=params[ATTR_DISABLE_WEB_PREV],
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
if msg is not None:
|
||||
msg_ids[chat_id] = msg.id
|
||||
return msg_ids
|
||||
|
||||
async def delete_message(self, chat_id=None, context=None, **kwargs):
|
||||
"""Delete a previously sent message."""
|
||||
chat_id = self._get_target_chat_ids(chat_id)[0]
|
||||
message_id, _ = self._get_msg_ids(kwargs, chat_id)
|
||||
_LOGGER.debug("Delete message %s in chat ID %s", message_id, chat_id)
|
||||
deleted = await self._send_msg(
|
||||
self.bot.delete_message,
|
||||
"Error deleting message",
|
||||
None,
|
||||
chat_id,
|
||||
message_id,
|
||||
context=context,
|
||||
)
|
||||
# reduce message_id anyway:
|
||||
if self._get_last_message_id()[chat_id] is not None:
|
||||
# change last msg_id for deque(n_msgs)?
|
||||
self._get_last_message_id()[chat_id] -= 1
|
||||
return deleted
|
||||
|
||||
async def edit_message(self, type_edit, chat_id=None, context=None, **kwargs):
|
||||
"""Edit a previously sent message."""
|
||||
chat_id = self._get_target_chat_ids(chat_id)[0]
|
||||
message_id, inline_message_id = self._get_msg_ids(kwargs, chat_id)
|
||||
params = self._get_msg_kwargs(kwargs)
|
||||
_LOGGER.debug(
|
||||
"Edit message %s in chat ID %s with params: %s",
|
||||
message_id or inline_message_id,
|
||||
chat_id,
|
||||
params,
|
||||
)
|
||||
if type_edit == SERVICE_EDIT_MESSAGE:
|
||||
message = kwargs.get(ATTR_MESSAGE)
|
||||
title = kwargs.get(ATTR_TITLE)
|
||||
text = f"{title}\n{message}" if title else message
|
||||
_LOGGER.debug("Editing message with ID %s", message_id or inline_message_id)
|
||||
return await self._send_msg(
|
||||
self.bot.edit_message_text,
|
||||
"Error editing text message",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
text,
|
||||
chat_id=chat_id,
|
||||
message_id=message_id,
|
||||
inline_message_id=inline_message_id,
|
||||
parse_mode=params[ATTR_PARSER],
|
||||
disable_web_page_preview=params[ATTR_DISABLE_WEB_PREV],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
context=context,
|
||||
)
|
||||
if type_edit == SERVICE_EDIT_CAPTION:
|
||||
return await self._send_msg(
|
||||
self.bot.edit_message_caption,
|
||||
"Error editing message attributes",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
message_id=message_id,
|
||||
inline_message_id=inline_message_id,
|
||||
caption=kwargs.get(ATTR_CAPTION),
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
parse_mode=params[ATTR_PARSER],
|
||||
context=context,
|
||||
)
|
||||
|
||||
return await self._send_msg(
|
||||
self.bot.edit_message_reply_markup,
|
||||
"Error editing message attributes",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
message_id=message_id,
|
||||
inline_message_id=inline_message_id,
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
context=context,
|
||||
)
|
||||
|
||||
async def answer_callback_query(
|
||||
self, message, callback_query_id, show_alert=False, context=None, **kwargs
|
||||
):
|
||||
"""Answer a callback originated with a press in an inline keyboard."""
|
||||
params = self._get_msg_kwargs(kwargs)
|
||||
_LOGGER.debug(
|
||||
"Answer callback query with callback ID %s: %s, alert: %s",
|
||||
callback_query_id,
|
||||
message,
|
||||
show_alert,
|
||||
)
|
||||
await self._send_msg(
|
||||
self.bot.answer_callback_query,
|
||||
"Error sending answer callback query",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
callback_query_id,
|
||||
text=message,
|
||||
show_alert=show_alert,
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
context=context,
|
||||
)
|
||||
|
||||
async def send_file(
|
||||
self, file_type=SERVICE_SEND_PHOTO, target=None, context=None, **kwargs
|
||||
):
|
||||
"""Send a photo, sticker, video, or document."""
|
||||
params = self._get_msg_kwargs(kwargs)
|
||||
file_content = await load_data(
|
||||
self.hass,
|
||||
url=kwargs.get(ATTR_URL),
|
||||
filepath=kwargs.get(ATTR_FILE),
|
||||
username=kwargs.get(ATTR_USERNAME),
|
||||
password=kwargs.get(ATTR_PASSWORD),
|
||||
authentication=kwargs.get(ATTR_AUTHENTICATION),
|
||||
verify_ssl=(
|
||||
get_default_context()
|
||||
if kwargs.get(ATTR_VERIFY_SSL, False)
|
||||
else get_default_no_verify_context()
|
||||
),
|
||||
)
|
||||
|
||||
msg_ids = {}
|
||||
if file_content:
|
||||
for chat_id in self._get_target_chat_ids(target):
|
||||
_LOGGER.debug("Sending file to chat ID %s", chat_id)
|
||||
|
||||
if file_type == SERVICE_SEND_PHOTO:
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_photo,
|
||||
"Error sending photo",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
photo=file_content,
|
||||
caption=kwargs.get(ATTR_CAPTION),
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
parse_mode=params[ATTR_PARSER],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
|
||||
elif file_type == SERVICE_SEND_STICKER:
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_sticker,
|
||||
"Error sending sticker",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
sticker=file_content,
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
|
||||
elif file_type == SERVICE_SEND_VIDEO:
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_video,
|
||||
"Error sending video",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
video=file_content,
|
||||
caption=kwargs.get(ATTR_CAPTION),
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
parse_mode=params[ATTR_PARSER],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
elif file_type == SERVICE_SEND_DOCUMENT:
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_document,
|
||||
"Error sending document",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
document=file_content,
|
||||
caption=kwargs.get(ATTR_CAPTION),
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
parse_mode=params[ATTR_PARSER],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
elif file_type == SERVICE_SEND_VOICE:
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_voice,
|
||||
"Error sending voice",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
voice=file_content,
|
||||
caption=kwargs.get(ATTR_CAPTION),
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
elif file_type == SERVICE_SEND_ANIMATION:
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_animation,
|
||||
"Error sending animation",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
animation=file_content,
|
||||
caption=kwargs.get(ATTR_CAPTION),
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
parse_mode=params[ATTR_PARSER],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
|
||||
msg_ids[chat_id] = msg.id
|
||||
file_content.seek(0)
|
||||
else:
|
||||
_LOGGER.error("Can't send file with kwargs: %s", kwargs)
|
||||
|
||||
return msg_ids
|
||||
|
||||
async def send_sticker(self, target=None, context=None, **kwargs) -> dict:
|
||||
"""Send a sticker from a telegram sticker pack."""
|
||||
params = self._get_msg_kwargs(kwargs)
|
||||
stickerid = kwargs.get(ATTR_STICKER_ID)
|
||||
|
||||
msg_ids = {}
|
||||
if stickerid:
|
||||
for chat_id in self._get_target_chat_ids(target):
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_sticker,
|
||||
"Error sending sticker",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
sticker=stickerid,
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
reply_markup=params[ATTR_REPLYMARKUP],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
msg_ids[chat_id] = msg.id
|
||||
return msg_ids
|
||||
return await self.send_file(SERVICE_SEND_STICKER, target, **kwargs)
|
||||
|
||||
async def send_location(
|
||||
self, latitude, longitude, target=None, context=None, **kwargs
|
||||
):
|
||||
"""Send a location."""
|
||||
latitude = float(latitude)
|
||||
longitude = float(longitude)
|
||||
params = self._get_msg_kwargs(kwargs)
|
||||
msg_ids = {}
|
||||
for chat_id in self._get_target_chat_ids(target):
|
||||
_LOGGER.debug(
|
||||
"Send location %s/%s to chat ID %s", latitude, longitude, chat_id
|
||||
)
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_location,
|
||||
"Error sending location",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
latitude=latitude,
|
||||
longitude=longitude,
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
msg_ids[chat_id] = msg.id
|
||||
return msg_ids
|
||||
|
||||
async def send_poll(
|
||||
self,
|
||||
question,
|
||||
options,
|
||||
is_anonymous,
|
||||
allows_multiple_answers,
|
||||
target=None,
|
||||
context=None,
|
||||
**kwargs,
|
||||
):
|
||||
"""Send a poll."""
|
||||
params = self._get_msg_kwargs(kwargs)
|
||||
openperiod = kwargs.get(ATTR_OPEN_PERIOD)
|
||||
msg_ids = {}
|
||||
for chat_id in self._get_target_chat_ids(target):
|
||||
_LOGGER.debug("Send poll '%s' to chat ID %s", question, chat_id)
|
||||
msg = await self._send_msg(
|
||||
self.bot.send_poll,
|
||||
"Error sending poll",
|
||||
params[ATTR_MESSAGE_TAG],
|
||||
chat_id=chat_id,
|
||||
question=question,
|
||||
options=options,
|
||||
is_anonymous=is_anonymous,
|
||||
allows_multiple_answers=allows_multiple_answers,
|
||||
open_period=openperiod,
|
||||
disable_notification=params[ATTR_DISABLE_NOTIF],
|
||||
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
|
||||
read_timeout=params[ATTR_TIMEOUT],
|
||||
message_thread_id=params[ATTR_MESSAGE_THREAD_ID],
|
||||
context=context,
|
||||
)
|
||||
msg_ids[chat_id] = msg.id
|
||||
return msg_ids
|
||||
|
||||
async def leave_chat(self, chat_id=None, context=None, **kwargs):
|
||||
"""Remove bot from chat."""
|
||||
chat_id = self._get_target_chat_ids(chat_id)[0]
|
||||
_LOGGER.debug("Leave from chat ID %s", chat_id)
|
||||
return await self._send_msg(
|
||||
self.bot.leave_chat, "Error leaving chat", None, chat_id, context=context
|
||||
)
|
||||
|
||||
|
||||
def initialize_bot(hass: HomeAssistant, p_config: MappingProxyType[str, Any]) -> Bot:
|
||||
"""Initialize telegram bot with proxy support."""
|
||||
api_key: str = p_config[CONF_API_KEY]
|
||||
proxy_url: str | None = p_config.get(CONF_PROXY_URL)
|
||||
proxy_params: dict | None = p_config.get(CONF_PROXY_PARAMS)
|
||||
|
||||
if proxy_url is not None:
|
||||
auth = None
|
||||
if proxy_params is None:
|
||||
# CONF_PROXY_PARAMS has been kept for backwards compatibility.
|
||||
proxy_params = {}
|
||||
elif "username" in proxy_params and "password" in proxy_params:
|
||||
# Auth can actually be stuffed into the URL, but the docs have previously
|
||||
# indicated to put them here.
|
||||
auth = proxy_params.pop("username"), proxy_params.pop("password")
|
||||
ir.create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"proxy_params_auth_deprecation",
|
||||
breaks_in_ha_version="2024.10.0",
|
||||
is_persistent=False,
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_placeholders={
|
||||
"proxy_params": CONF_PROXY_PARAMS,
|
||||
"proxy_url": CONF_PROXY_URL,
|
||||
"telegram_bot": "Telegram bot",
|
||||
},
|
||||
translation_key="proxy_params_auth_deprecation",
|
||||
learn_more_url="https://github.com/home-assistant/core/pull/112778",
|
||||
)
|
||||
else:
|
||||
ir.create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"proxy_params_deprecation",
|
||||
breaks_in_ha_version="2024.10.0",
|
||||
is_persistent=False,
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_placeholders={
|
||||
"proxy_params": CONF_PROXY_PARAMS,
|
||||
"proxy_url": CONF_PROXY_URL,
|
||||
"httpx": "httpx",
|
||||
"telegram_bot": "Telegram bot",
|
||||
},
|
||||
translation_key="proxy_params_deprecation",
|
||||
learn_more_url="https://github.com/home-assistant/core/pull/112778",
|
||||
)
|
||||
proxy = httpx.Proxy(proxy_url, auth=auth, **proxy_params)
|
||||
request = HTTPXRequest(connection_pool_size=8, proxy=proxy)
|
||||
else:
|
||||
request = HTTPXRequest(connection_pool_size=8)
|
||||
return Bot(token=api_key, request=request)
|
||||
|
||||
|
||||
async def load_data(
|
||||
hass: HomeAssistant,
|
||||
url=None,
|
||||
filepath=None,
|
||||
username=None,
|
||||
password=None,
|
||||
authentication=None,
|
||||
num_retries=5,
|
||||
verify_ssl=None,
|
||||
):
|
||||
"""Load data into ByteIO/File container from a source."""
|
||||
try:
|
||||
if url is not None:
|
||||
# Load data from URL
|
||||
params: dict[str, Any] = {}
|
||||
headers = {}
|
||||
if authentication == HTTP_BEARER_AUTHENTICATION and password is not None:
|
||||
headers = {"Authorization": f"Bearer {password}"}
|
||||
elif username is not None and password is not None:
|
||||
if authentication == HTTP_DIGEST_AUTHENTICATION:
|
||||
params["auth"] = httpx.DigestAuth(username, password)
|
||||
else:
|
||||
params["auth"] = httpx.BasicAuth(username, password)
|
||||
if verify_ssl is not None:
|
||||
params["verify"] = verify_ssl
|
||||
|
||||
retry_num = 0
|
||||
async with httpx.AsyncClient(
|
||||
timeout=15, headers=headers, **params
|
||||
) as client:
|
||||
while retry_num < num_retries:
|
||||
req = await client.get(url)
|
||||
if req.status_code != 200:
|
||||
_LOGGER.warning(
|
||||
"Status code %s (retry #%s) loading %s",
|
||||
req.status_code,
|
||||
retry_num + 1,
|
||||
url,
|
||||
)
|
||||
else:
|
||||
data = io.BytesIO(req.content)
|
||||
if data.read():
|
||||
data.seek(0)
|
||||
data.name = url
|
||||
return data
|
||||
_LOGGER.warning(
|
||||
"Empty data (retry #%s) in %s)", retry_num + 1, url
|
||||
)
|
||||
retry_num += 1
|
||||
if retry_num < num_retries:
|
||||
await asyncio.sleep(
|
||||
1
|
||||
) # Add a sleep to allow other async operations to proceed
|
||||
_LOGGER.warning(
|
||||
"Can't load data in %s after %s retries", url, retry_num
|
||||
)
|
||||
elif filepath is not None:
|
||||
if hass.config.is_allowed_path(filepath):
|
||||
return await hass.async_add_executor_job(
|
||||
_read_file_as_bytesio, filepath
|
||||
)
|
||||
|
||||
_LOGGER.warning("'%s' are not secure to load data from!", filepath)
|
||||
else:
|
||||
_LOGGER.warning("Can't load data. No data found in params!")
|
||||
|
||||
except (OSError, TypeError) as error:
|
||||
_LOGGER.error("Can't load data into ByteIO: %s", error)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _read_file_as_bytesio(file_path: str) -> io.BytesIO:
|
||||
"""Read a file and return it as a BytesIO object."""
|
||||
with open(file_path, "rb") as file:
|
||||
data = io.BytesIO(file.read())
|
||||
data.name = file_path
|
||||
return data
|
homeassistant/components/telegram_bot/broadcast.py
@@ -1,6 +1,14 @@
 """Support for Telegram bot to send messages only."""

+from telegram import Bot
+
+from homeassistant.core import HomeAssistant
+
+from .bot import BaseTelegramBot, TelegramBotConfigEntry

-async def async_setup_platform(hass, bot, config):
+
+async def async_setup_platform(
+    hass: HomeAssistant, bot: Bot, config: TelegramBotConfigEntry
+) -> type[BaseTelegramBot] | None:
     """Set up the Telegram broadcast platform."""
-    return True
+    return None
homeassistant/components/telegram_bot/config_flow.py (new file, 620 lines)
@@ -0,0 +1,620 @@
|
||||
"""Config flow for Telegram Bot."""
|
||||
|
||||
from collections.abc import Mapping
|
||||
from ipaddress import AddressValueError, IPv4Network
|
||||
import logging
|
||||
from types import MappingProxyType
|
||||
from typing import Any
|
||||
|
||||
from telegram import Bot, ChatFullInfo
|
||||
from telegram.error import BadRequest, InvalidToken, NetworkError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_IMPORT,
|
||||
SOURCE_RECONFIGURE,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryData,
|
||||
ConfigSubentryFlow,
|
||||
OptionsFlow,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_PLATFORM, CONF_URL
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.network import NoURLAvailableError, get_url
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from . import initialize_bot
|
||||
from .bot import TelegramBotConfigEntry
|
||||
from .const import (
|
||||
ATTR_PARSER,
|
||||
BOT_NAME,
|
||||
CONF_ALLOWED_CHAT_IDS,
|
||||
CONF_BOT_COUNT,
|
||||
CONF_CHAT_ID,
|
||||
CONF_PROXY_URL,
|
||||
CONF_TRUSTED_NETWORKS,
|
||||
DEFAULT_TRUSTED_NETWORKS,
|
||||
DOMAIN,
|
||||
ERROR_FIELD,
|
||||
ERROR_MESSAGE,
|
||||
ISSUE_DEPRECATED_YAML,
|
||||
ISSUE_DEPRECATED_YAML_HAS_MORE_PLATFORMS,
|
||||
ISSUE_DEPRECATED_YAML_IMPORT_ISSUE_ERROR,
|
||||
PARSER_HTML,
|
||||
PARSER_MD,
|
||||
PARSER_MD2,
|
||||
PLATFORM_BROADCAST,
|
||||
PLATFORM_POLLING,
|
||||
PLATFORM_WEBHOOKS,
|
||||
SUBENTRY_TYPE_ALLOWED_CHAT_IDS,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA: vol.Schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PLATFORM): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
PLATFORM_BROADCAST,
|
||||
PLATFORM_POLLING,
|
||||
PLATFORM_WEBHOOKS,
|
||||
],
|
||||
translation_key="platforms",
|
||||
)
|
||||
),
|
||||
vol.Required(CONF_API_KEY): TextSelector(
|
||||
TextSelectorConfig(
|
||||
type=TextSelectorType.PASSWORD,
|
||||
autocomplete="current-password",
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_PROXY_URL): TextSelector(
|
||||
config=TextSelectorConfig(type=TextSelectorType.URL)
|
||||
),
|
||||
}
|
||||
)
|
||||
STEP_RECONFIGURE_USER_DATA_SCHEMA: vol.Schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PLATFORM): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
PLATFORM_BROADCAST,
|
||||
PLATFORM_POLLING,
|
||||
PLATFORM_WEBHOOKS,
|
||||
],
|
||||
translation_key="platforms",
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_PROXY_URL): TextSelector(
|
||||
config=TextSelectorConfig(type=TextSelectorType.URL)
|
||||
),
|
||||
}
|
||||
)
|
||||
STEP_REAUTH_DATA_SCHEMA: vol.Schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): TextSelector(
|
||||
TextSelectorConfig(
|
||||
type=TextSelectorType.PASSWORD,
|
||||
autocomplete="current-password",
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
STEP_WEBHOOKS_DATA_SCHEMA: vol.Schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_URL): TextSelector(
|
||||
config=TextSelectorConfig(type=TextSelectorType.URL)
|
||||
),
|
||||
vol.Required(CONF_TRUSTED_NETWORKS): vol.Coerce(str),
|
||||
}
|
||||
)
|
||||
OPTIONS_SCHEMA: vol.Schema = vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
ATTR_PARSER,
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[PARSER_MD, PARSER_MD2, PARSER_HTML],
|
||||
translation_key="parsers",
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Options flow for webhooks."""
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
OPTIONS_SCHEMA,
|
||||
self.config_entry.options,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class TelegramBotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Telegram."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: TelegramBotConfigEntry,
|
||||
) -> OptionsFlowHandler:
|
||||
"""Create the options flow."""
|
||||
return OptionsFlowHandler()
|
||||
|
||||
@classmethod
|
||||
@callback
|
||||
def async_get_supported_subentry_types(
|
||||
cls, config_entry: TelegramBotConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this integration."""
|
||||
return {SUBENTRY_TYPE_ALLOWED_CHAT_IDS: AllowedChatIdsSubEntryFlowHandler}
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Create instance of the config flow."""
|
||||
super().__init__()
|
||||
self._bot: Bot | None = None
|
||||
self._bot_name = "Unknown bot"
|
||||
|
||||
# for passing data between steps
|
||||
self._step_user_data: dict[str, Any] = {}
|
||||
|
||||
# triggered by async_setup() from __init__.py
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle import of config entry from configuration.yaml."""
|
||||
|
||||
telegram_bot: str = f"{import_data[CONF_PLATFORM]} Telegram bot"
|
||||
bot_count: int = import_data[CONF_BOT_COUNT]
|
||||
|
||||
import_data[CONF_TRUSTED_NETWORKS] = ",".join(
|
||||
import_data[CONF_TRUSTED_NETWORKS]
|
||||
)
|
||||
try:
|
||||
config_flow_result: ConfigFlowResult = await self.async_step_user(
|
||||
import_data
|
||||
)
|
||||
except AbortFlow:
|
||||
# this happens if the config entry is already imported
|
||||
self._create_issue(ISSUE_DEPRECATED_YAML, telegram_bot, bot_count)
|
||||
raise
|
||||
else:
|
||||
errors: dict[str, str] | None = config_flow_result.get("errors")
|
||||
if errors:
|
||||
error: str = errors.get("base", "unknown")
|
||||
self._create_issue(
|
||||
error,
|
||||
telegram_bot,
|
||||
bot_count,
|
||||
config_flow_result["description_placeholders"],
|
||||
)
|
||||
return self.async_abort(reason="import_failed")
|
||||
|
||||
subentries: list[ConfigSubentryData] = []
|
||||
allowed_chat_ids: list[int] = import_data[CONF_ALLOWED_CHAT_IDS]
|
||||
for chat_id in allowed_chat_ids:
|
||||
chat_name: str = await _async_get_chat_name(self._bot, chat_id)
|
||||
subentry: ConfigSubentryData = ConfigSubentryData(
|
||||
data={CONF_CHAT_ID: chat_id},
|
||||
subentry_type=CONF_ALLOWED_CHAT_IDS,
|
||||
title=chat_name,
|
||||
unique_id=str(chat_id),
|
||||
)
|
||||
subentries.append(subentry)
|
||||
config_flow_result["subentries"] = subentries
|
||||
|
||||
self._create_issue(
|
||||
ISSUE_DEPRECATED_YAML,
|
||||
telegram_bot,
|
||||
bot_count,
|
||||
config_flow_result["description_placeholders"],
|
||||
)
|
||||
return config_flow_result
|
||||
|
||||
def _create_issue(
|
||||
self,
|
||||
issue: str,
|
||||
telegram_bot_type: str,
|
||||
bot_count: int,
|
||||
description_placeholders: Mapping[str, str] | None = None,
|
||||
) -> None:
|
||||
translation_key: str = (
|
||||
ISSUE_DEPRECATED_YAML
|
||||
if bot_count == 1
|
||||
else ISSUE_DEPRECATED_YAML_HAS_MORE_PLATFORMS
|
||||
)
|
||||
if issue != ISSUE_DEPRECATED_YAML:
|
||||
translation_key = ISSUE_DEPRECATED_YAML_IMPORT_ISSUE_ERROR
|
||||
|
||||
telegram_bot = (
|
||||
description_placeholders.get(BOT_NAME, telegram_bot_type)
|
||||
if description_placeholders
|
||||
else telegram_bot_type
|
||||
)
|
||||
error_field = (
|
||||
description_placeholders.get(ERROR_FIELD, "Unknown error")
|
||||
if description_placeholders
|
||||
else "Unknown error"
|
||||
)
|
||||
error_message = (
|
||||
description_placeholders.get(ERROR_MESSAGE, "Unknown error")
|
||||
if description_placeholders
|
||||
else "Unknown error"
|
||||
)
|
||||
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
ISSUE_DEPRECATED_YAML,
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key=translation_key,
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Telegram Bot",
|
||||
"telegram_bot": telegram_bot,
|
||||
ERROR_FIELD: error_field,
|
||||
ERROR_MESSAGE: error_message,
|
||||
},
|
||||
learn_more_url="https://github.com/home-assistant/core/pull/144617",
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow to create a new config entry for a Telegram bot."""
|
||||
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
)
|
||||
|
||||
# prevent duplicates
|
||||
await self.async_set_unique_id(user_input[CONF_API_KEY])
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
# validate connection to Telegram API
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = {}
|
||||
bot_name = await self._validate_bot(
|
||||
user_input, errors, description_placeholders
|
||||
)
|
||||
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_USER_DATA_SCHEMA, user_input
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
||||
if user_input[CONF_PLATFORM] != PLATFORM_WEBHOOKS:
|
||||
await self._shutdown_bot()
|
||||
|
||||
return self.async_create_entry(
|
||||
title=bot_name,
|
||||
data={
|
||||
CONF_PLATFORM: user_input[CONF_PLATFORM],
|
||||
CONF_API_KEY: user_input[CONF_API_KEY],
|
||||
CONF_PROXY_URL: user_input.get(CONF_PROXY_URL),
|
||||
},
|
||||
options={
|
||||
# this value may come from yaml import
|
||||
ATTR_PARSER: user_input.get(ATTR_PARSER, PARSER_MD)
|
||||
},
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
||||
self._bot_name = bot_name
|
||||
self._step_user_data.update(user_input)
|
||||
|
||||
if self.source == SOURCE_IMPORT:
|
||||
return await self.async_step_webhooks(
|
||||
{
|
||||
CONF_URL: user_input.get(CONF_URL),
|
||||
CONF_TRUSTED_NETWORKS: user_input[CONF_TRUSTED_NETWORKS],
|
||||
}
|
||||
)
|
||||
return await self.async_step_webhooks()
|
||||
|
||||
async def _shutdown_bot(self) -> None:
|
||||
"""Shutdown the bot if it exists."""
|
||||
if self._bot:
|
||||
await self._bot.shutdown()
|
||||
self._bot = None
|
||||
|
||||
async def _validate_bot(
|
||||
self,
|
||||
user_input: dict[str, Any],
|
||||
errors: dict[str, str],
|
||||
placeholders: dict[str, str],
|
||||
) -> str:
|
||||
try:
|
||||
bot = await self.hass.async_add_executor_job(
|
||||
initialize_bot, self.hass, MappingProxyType(user_input)
|
||||
)
|
||||
self._bot = bot
|
||||
|
||||
user = await bot.get_me()
|
||||
except InvalidToken as err:
|
||||
_LOGGER.warning("Invalid API token")
|
||||
errors["base"] = "invalid_api_key"
|
||||
placeholders[ERROR_FIELD] = "API key"
|
||||
placeholders[ERROR_MESSAGE] = str(err)
|
||||
return "Unknown bot"
|
||||
except (ValueError, NetworkError) as err:
|
||||
_LOGGER.warning("Invalid proxy")
|
||||
errors["base"] = "invalid_proxy_url"
|
||||
placeholders["proxy_url_error"] = str(err)
|
||||
placeholders[ERROR_FIELD] = "proxy url"
|
||||
placeholders[ERROR_MESSAGE] = str(err)
|
||||
return "Unknown bot"
|
||||
else:
|
||||
return user.full_name
|
||||
|
||||
async def async_step_webhooks(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle config flow for webhook Telegram bot."""
|
||||
|
||||
if not user_input:
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
return self.async_show_form(
|
||||
step_id="webhooks",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_WEBHOOKS_DATA_SCHEMA,
|
||||
self._get_reconfigure_entry().data,
|
||||
),
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="webhooks",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_WEBHOOKS_DATA_SCHEMA,
|
||||
{
|
||||
CONF_TRUSTED_NETWORKS: ",".join(
|
||||
[str(network) for network in DEFAULT_TRUSTED_NETWORKS]
|
||||
),
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = {BOT_NAME: self._bot_name}
|
||||
self._validate_webhooks(user_input, errors, description_placeholders)
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="webhooks",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_WEBHOOKS_DATA_SCHEMA,
|
||||
user_input,
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
||||
await self._shutdown_bot()
|
||||
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
user_input.update(self._step_user_data)
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(),
|
||||
title=self._bot_name,
|
||||
data_updates=user_input,
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self._bot_name,
|
||||
data={
|
||||
CONF_PLATFORM: self._step_user_data[CONF_PLATFORM],
|
||||
CONF_API_KEY: self._step_user_data[CONF_API_KEY],
|
||||
CONF_PROXY_URL: self._step_user_data.get(CONF_PROXY_URL),
|
||||
CONF_URL: user_input.get(CONF_URL),
|
||||
CONF_TRUSTED_NETWORKS: user_input[CONF_TRUSTED_NETWORKS],
|
||||
},
|
||||
options={ATTR_PARSER: self._step_user_data.get(ATTR_PARSER, PARSER_MD)},
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
||||
def _validate_webhooks(
|
||||
self,
|
||||
user_input: dict[str, Any],
|
||||
errors: dict[str, str],
|
||||
description_placeholders: dict[str, str],
|
||||
) -> None:
|
||||
# validate URL
|
||||
if CONF_URL in user_input and not user_input[CONF_URL].startswith("https"):
|
||||
errors["base"] = "invalid_url"
|
||||
description_placeholders[ERROR_FIELD] = "URL"
|
||||
description_placeholders[ERROR_MESSAGE] = "URL must start with https"
|
||||
return
|
||||
if CONF_URL not in user_input:
|
||||
try:
|
||||
get_url(self.hass, require_ssl=True, allow_internal=False)
|
||||
except NoURLAvailableError:
|
||||
errors["base"] = "no_url_available"
|
||||
description_placeholders[ERROR_FIELD] = "URL"
|
||||
description_placeholders[ERROR_MESSAGE] = (
|
||||
"URL is required since you have not configured an external URL in Home Assistant"
|
||||
)
|
||||
return
|
||||
|
||||
# validate trusted networks
|
||||
csv_trusted_networks: list[str] = []
|
||||
formatted_trusted_networks: str = (
|
||||
user_input[CONF_TRUSTED_NETWORKS].lstrip("[").rstrip("]")
|
||||
)
|
||||
for trusted_network in cv.ensure_list_csv(formatted_trusted_networks):
|
||||
formatted_trusted_network: str = trusted_network.strip("'")
|
||||
try:
|
||||
IPv4Network(formatted_trusted_network)
|
||||
except (AddressValueError, ValueError) as err:
|
||||
errors["base"] = "invalid_trusted_networks"
|
||||
description_placeholders[ERROR_FIELD] = "trusted networks"
|
||||
description_placeholders[ERROR_MESSAGE] = str(err)
|
||||
return
|
||||
else:
|
||||
csv_trusted_networks.append(formatted_trusted_network)
|
||||
user_input[CONF_TRUSTED_NETWORKS] = csv_trusted_networks
|
||||
|
||||
return
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Reconfigure Telegram bot."""
|
||||
|
||||
api_key: str = self._get_reconfigure_entry().data[CONF_API_KEY]
|
||||
await self.async_set_unique_id(api_key)
|
||||
self._abort_if_unique_id_mismatch()
|
||||
|
||||
if not user_input:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_RECONFIGURE_USER_DATA_SCHEMA,
|
||||
self._get_reconfigure_entry().data,
|
||||
),
|
||||
)
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = {}
|
||||
|
||||
user_input[CONF_API_KEY] = api_key
|
||||
bot_name = await self._validate_bot(
|
||||
user_input, errors, description_placeholders
|
||||
)
|
||||
self._bot_name = bot_name
|
||||
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_RECONFIGURE_USER_DATA_SCHEMA,
|
||||
user_input,
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
||||
if user_input[CONF_PLATFORM] != PLATFORM_WEBHOOKS:
|
||||
await self._shutdown_bot()
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(), title=bot_name, data_updates=user_input
|
||||
)
|
||||
|
||||
self._step_user_data.update(user_input)
|
||||
return await self.async_step_webhooks()
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Reauth step."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Reauth confirm step."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_REAUTH_DATA_SCHEMA, self._get_reauth_entry().data
|
||||
),
|
||||
)
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders: dict[str, str] = {}
|
||||
|
||||
bot_name = await self._validate_bot(
|
||||
user_input, errors, description_placeholders
|
||||
)
|
||||
await self._shutdown_bot()
|
||||
|
||||
if errors:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_REAUTH_DATA_SCHEMA, self._get_reauth_entry().data
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), title=bot_name, data_updates=user_input
|
||||
)
|
||||
|
||||
|
||||
class AllowedChatIdsSubEntryFlowHandler(ConfigSubentryFlow):
|
||||
"""Handle a subentry flow for creating chat ID."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Create allowed chat ID."""
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
config_entry: TelegramBotConfigEntry = self._get_entry()
|
||||
bot = config_entry.runtime_data.bot
|
||||
|
||||
chat_id: int = user_input[CONF_CHAT_ID]
|
||||
chat_name = await _async_get_chat_name(bot, chat_id)
|
||||
if chat_name:
|
||||
return self.async_create_entry(
|
||||
title=chat_name,
|
||||
data={CONF_CHAT_ID: chat_id},
|
||||
unique_id=str(chat_id),
|
||||
)
|
||||
|
||||
errors["base"] = "chat_not_found"
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema({vol.Required(CONF_CHAT_ID): vol.Coerce(int)}),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
async def _async_get_chat_name(bot: Bot | None, chat_id: int) -> str:
|
||||
if not bot:
|
||||
return str(chat_id)
|
||||
|
||||
try:
|
||||
chat_info: ChatFullInfo = await bot.get_chat(chat_id)
|
||||
return chat_info.effective_name or str(chat_id)
|
||||
except BadRequest:
|
||||
return ""
|
109
homeassistant/components/telegram_bot/const.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""Constants for the Telegram Bot integration."""
|
||||
|
||||
from ipaddress import ip_network
|
||||
|
||||
DOMAIN = "telegram_bot"
|
||||
|
||||
PLATFORM_BROADCAST = "broadcast"
|
||||
PLATFORM_POLLING = "polling"
|
||||
PLATFORM_WEBHOOKS = "webhooks"
|
||||
|
||||
SUBENTRY_TYPE_ALLOWED_CHAT_IDS = "allowed_chat_ids"
|
||||
|
||||
CONF_BOT_COUNT = "bot_count"
|
||||
CONF_ALLOWED_CHAT_IDS = "allowed_chat_ids"
|
||||
CONF_CONFIG_ENTRY_ID = "config_entry_id"
|
||||
CONF_PROXY_PARAMS = "proxy_params"
|
||||
|
||||
|
||||
CONF_PROXY_URL = "proxy_url"
|
||||
CONF_TRUSTED_NETWORKS = "trusted_networks"
|
||||
|
||||
# subentry
|
||||
CONF_CHAT_ID = "chat_id"
|
||||
|
||||
BOT_NAME = "telegram_bot"
|
||||
ERROR_FIELD = "error_field"
|
||||
ERROR_MESSAGE = "error_message"
|
||||
|
||||
ISSUE_DEPRECATED_YAML = "deprecated_yaml"
|
||||
ISSUE_DEPRECATED_YAML_HAS_MORE_PLATFORMS = (
|
||||
"deprecated_yaml_import_issue_has_more_platforms"
|
||||
)
|
||||
ISSUE_DEPRECATED_YAML_IMPORT_ISSUE_ERROR = "deprecated_yaml_import_issue_error"
|
||||
|
||||
DEFAULT_TRUSTED_NETWORKS = [ip_network("149.154.160.0/20"), ip_network("91.108.4.0/22")]
|
||||
|
||||
SERVICE_SEND_MESSAGE = "send_message"
|
||||
SERVICE_SEND_PHOTO = "send_photo"
|
||||
SERVICE_SEND_STICKER = "send_sticker"
|
||||
SERVICE_SEND_ANIMATION = "send_animation"
|
||||
SERVICE_SEND_VIDEO = "send_video"
|
||||
SERVICE_SEND_VOICE = "send_voice"
|
||||
SERVICE_SEND_DOCUMENT = "send_document"
|
||||
SERVICE_SEND_LOCATION = "send_location"
|
||||
SERVICE_SEND_POLL = "send_poll"
|
||||
SERVICE_EDIT_MESSAGE = "edit_message"
|
||||
SERVICE_EDIT_CAPTION = "edit_caption"
|
||||
SERVICE_EDIT_REPLYMARKUP = "edit_replymarkup"
|
||||
SERVICE_ANSWER_CALLBACK_QUERY = "answer_callback_query"
|
||||
SERVICE_DELETE_MESSAGE = "delete_message"
|
||||
SERVICE_LEAVE_CHAT = "leave_chat"
|
||||
|
||||
EVENT_TELEGRAM_CALLBACK = "telegram_callback"
|
||||
EVENT_TELEGRAM_COMMAND = "telegram_command"
|
||||
EVENT_TELEGRAM_TEXT = "telegram_text"
|
||||
EVENT_TELEGRAM_SENT = "telegram_sent"
|
||||
|
||||
PARSER_HTML = "html"
|
||||
PARSER_MD = "markdown"
|
||||
PARSER_MD2 = "markdownv2"
|
||||
PARSER_PLAIN_TEXT = "plain_text"
|
||||
|
||||
ATTR_DATA = "data"
|
||||
ATTR_MESSAGE = "message"
|
||||
ATTR_TITLE = "title"
|
||||
|
||||
ATTR_ARGS = "args"
|
||||
ATTR_AUTHENTICATION = "authentication"
|
||||
ATTR_CALLBACK_QUERY = "callback_query"
|
||||
ATTR_CALLBACK_QUERY_ID = "callback_query_id"
|
||||
ATTR_CAPTION = "caption"
|
||||
ATTR_CHAT_ID = "chat_id"
|
||||
ATTR_CHAT_INSTANCE = "chat_instance"
|
||||
ATTR_DATE = "date"
|
||||
ATTR_DISABLE_NOTIF = "disable_notification"
|
||||
ATTR_DISABLE_WEB_PREV = "disable_web_page_preview"
|
||||
ATTR_EDITED_MSG = "edited_message"
|
||||
ATTR_FILE = "file"
|
||||
ATTR_FROM_FIRST = "from_first"
|
||||
ATTR_FROM_LAST = "from_last"
|
||||
ATTR_KEYBOARD = "keyboard"
|
||||
ATTR_RESIZE_KEYBOARD = "resize_keyboard"
|
||||
ATTR_ONE_TIME_KEYBOARD = "one_time_keyboard"
|
||||
ATTR_KEYBOARD_INLINE = "inline_keyboard"
|
||||
ATTR_MESSAGEID = "message_id"
|
||||
ATTR_MSG = "message"
|
||||
ATTR_MSGID = "id"
|
||||
ATTR_PARSER = "parse_mode"
|
||||
ATTR_PASSWORD = "password"
|
||||
ATTR_REPLY_TO_MSGID = "reply_to_message_id"
|
||||
ATTR_REPLYMARKUP = "reply_markup"
|
||||
ATTR_SHOW_ALERT = "show_alert"
|
||||
ATTR_STICKER_ID = "sticker_id"
|
||||
ATTR_TARGET = "target"
|
||||
ATTR_TEXT = "text"
|
||||
ATTR_URL = "url"
|
||||
ATTR_USER_ID = "user_id"
|
||||
ATTR_USERNAME = "username"
|
||||
ATTR_VERIFY_SSL = "verify_ssl"
|
||||
ATTR_TIMEOUT = "timeout"
|
||||
ATTR_MESSAGE_TAG = "message_tag"
|
||||
ATTR_CHANNEL_POST = "channel_post"
|
||||
ATTR_QUESTION = "question"
|
||||
ATTR_OPTIONS = "options"
|
||||
ATTR_ANSWERS = "answers"
|
||||
ATTR_OPEN_PERIOD = "open_period"
|
||||
ATTR_IS_ANONYMOUS = "is_anonymous"
|
||||
ATTR_ALLOWS_MULTIPLE_ANSWERS = "allows_multiple_answers"
|
||||
ATTR_MESSAGE_THREAD_ID = "message_thread_id"
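
A minimal sketch (not part of the integration) of consuming one of the events defined above from custom code; the payload keys depend on the incoming update, so the handler only logs the raw event data.

```python
"""Sketch: listen for the telegram_text event on the Home Assistant event bus."""
import logging

from homeassistant.core import Event, HomeAssistant, callback

_LOGGER = logging.getLogger(__name__)

EVENT_TELEGRAM_TEXT = "telegram_text"  # same value as the constant above


@callback
def _log_telegram_text(event: Event) -> None:
    """Log every incoming Telegram text event."""
    _LOGGER.info("Telegram text event: %s", event.data)


def register_example_listener(hass: HomeAssistant) -> None:
    """Register the example listener (call from within the event loop)."""
    hass.bus.async_listen(EVENT_TELEGRAM_TEXT, _log_telegram_text)
```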
|
@@ -2,6 +2,7 @@
|
||||
"domain": "telegram_bot",
|
||||
"name": "Telegram bot",
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"dependencies": ["http"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/telegram_bot",
|
||||
"iot_class": "cloud_push",
|
||||
|
@@ -2,34 +2,35 @@
|
||||
|
||||
import logging
|
||||
|
||||
from telegram import Update
|
||||
from telegram import Bot, Update
|
||||
from telegram.error import NetworkError, RetryAfter, TelegramError, TimedOut
|
||||
from telegram.ext import ApplicationBuilder, CallbackContext, TypeHandler
|
||||
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import BaseTelegramBotEntity
|
||||
from .bot import BaseTelegramBot, TelegramBotConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_platform(hass, bot, config):
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant, bot: Bot, config: TelegramBotConfigEntry
|
||||
) -> BaseTelegramBot | None:
|
||||
"""Set up the Telegram polling platform."""
|
||||
pollbot = PollBot(hass, bot, config)
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, pollbot.start_polling)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, pollbot.stop_polling)
|
||||
config.async_create_task(hass, pollbot.start_polling(), "polling telegram bot")
|
||||
|
||||
return True
|
||||
return pollbot
|
||||
|
||||
|
||||
async def process_error(update: Update, context: CallbackContext) -> None:
|
||||
async def process_error(update: object, context: CallbackContext) -> None:
|
||||
"""Telegram bot error handler."""
|
||||
if context.error:
|
||||
error_callback(context.error, update)
|
||||
|
||||
|
||||
def error_callback(error: Exception, update: Update | None = None) -> None:
|
||||
def error_callback(error: Exception, update: object | None = None) -> None:
|
||||
"""Log the error."""
|
||||
try:
|
||||
raise error
|
||||
@@ -43,13 +44,15 @@ def error_callback(error: Exception, update: Update | None = None) -> None:
|
||||
_LOGGER.error("%s: %s", error.__class__.__name__, error)
|
||||
|
||||
|
||||
class PollBot(BaseTelegramBotEntity):
|
||||
class PollBot(BaseTelegramBot):
|
||||
"""Controls the Application object that holds the bot and an updater.
|
||||
|
||||
The application is set up to pass Telegram updates to `self.handle_update`.
|
||||
"""
|
||||
|
||||
def __init__(self, hass, bot, config):
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, bot: Bot, config: TelegramBotConfigEntry
|
||||
) -> None:
|
||||
"""Create Application to poll for updates."""
|
||||
super().__init__(hass, config)
|
||||
self.bot = bot
|
||||
@@ -57,6 +60,10 @@ class PollBot(BaseTelegramBotEntity):
|
||||
self.application.add_handler(TypeHandler(Update, self.handle_update))
|
||||
self.application.add_error_handler(process_error)
|
||||
|
||||
async def shutdown(self) -> None:
|
||||
"""Shutdown the app."""
|
||||
await self.stop_polling()
|
||||
|
||||
async def start_polling(self, event=None):
|
||||
"""Start the polling task."""
|
||||
_LOGGER.debug("Starting polling")
|
||||
|
@@ -2,6 +2,10 @@
|
||||
|
||||
send_message:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
message:
|
||||
required: true
|
||||
example: The garage door has been open for 10 minutes.
|
||||
@@ -61,6 +65,10 @@ send_message:
|
||||
|
||||
send_photo:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
url:
|
||||
example: "http://example.org/path/to/the/image.png"
|
||||
selector:
|
||||
@@ -137,6 +145,10 @@ send_photo:
|
||||
|
||||
send_sticker:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
url:
|
||||
example: "http://example.org/path/to/the/sticker.webp"
|
||||
selector:
|
||||
@@ -205,6 +217,10 @@ send_sticker:
|
||||
|
||||
send_animation:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
url:
|
||||
example: "http://example.org/path/to/the/animation.gif"
|
||||
selector:
|
||||
@@ -281,6 +297,10 @@ send_animation:
|
||||
|
||||
send_video:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
url:
|
||||
example: "http://example.org/path/to/the/video.mp4"
|
||||
selector:
|
||||
@@ -357,6 +377,10 @@ send_video:
|
||||
|
||||
send_voice:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
url:
|
||||
example: "http://example.org/path/to/the/voice.opus"
|
||||
selector:
|
||||
@@ -425,6 +449,10 @@ send_voice:
|
||||
|
||||
send_document:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
url:
|
||||
example: "http://example.org/path/to/the/document.odf"
|
||||
selector:
|
||||
@@ -501,6 +529,10 @@ send_document:
|
||||
|
||||
send_location:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
latitude:
|
||||
required: true
|
||||
selector:
|
||||
@@ -555,6 +587,10 @@ send_location:
|
||||
|
||||
send_poll:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
target:
|
||||
example: "[12345, 67890] or 12345"
|
||||
selector:
|
||||
@@ -603,6 +639,10 @@ send_poll:
|
||||
|
||||
edit_message:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
message_id:
|
||||
required: true
|
||||
example: "{{ trigger.event.data.message.message_id }}"
|
||||
@@ -641,6 +681,10 @@ edit_message:
|
||||
|
||||
edit_caption:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
message_id:
|
||||
required: true
|
||||
example: "{{ trigger.event.data.message.message_id }}"
|
||||
@@ -665,6 +709,10 @@ edit_caption:
|
||||
|
||||
edit_replymarkup:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
message_id:
|
||||
required: true
|
||||
example: "{{ trigger.event.data.message.message_id }}"
|
||||
@@ -685,6 +733,10 @@ edit_replymarkup:
|
||||
|
||||
answer_callback_query:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
message:
|
||||
required: true
|
||||
example: "OK, I'm listening"
|
||||
@@ -708,6 +760,10 @@ answer_callback_query:
|
||||
|
||||
delete_message:
|
||||
fields:
|
||||
config_entry_id:
|
||||
selector:
|
||||
config_entry:
|
||||
integration: telegram_bot
|
||||
message_id:
|
||||
required: true
|
||||
example: "{{ trigger.event.data.message.message_id }}"
|
||||
|
@@ -1,9 +1,128 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Telegram bot setup",
|
||||
"description": "Create a new Telegram bot",
|
||||
"data": {
|
||||
"platform": "Platform",
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"proxy_url": "Proxy URL"
|
||||
},
|
||||
"data_description": {
|
||||
"platform": "Telegram bot implementation",
|
||||
"api_key": "The API token of your bot.",
|
||||
"proxy_url": "Proxy URL if working behind one, optionally including username and password.\n(socks5://username:password@proxy_ip:proxy_port)"
|
||||
}
|
||||
},
|
||||
"webhooks": {
|
||||
"title": "Webhooks network configuration",
|
||||
"data": {
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"trusted_networks": "Trusted networks"
|
||||
},
|
||||
"data_description": {
|
||||
"url": "Allow to overwrite the external URL from the Home Assistant configuration for different setups.",
|
||||
"trusted_networks": "Telegram server access ACL as list.\nDefault: 149.154.160.0/20, 91.108.4.0/22"
|
||||
}
|
||||
},
|
||||
"reconfigure": {
|
||||
"title": "Telegram bot setup",
|
||||
"description": "Reconfigure Telegram bot",
|
||||
"data": {
|
||||
"platform": "[%key:component::telegram_bot::config::step::user::data::platform%]",
|
||||
"proxy_url": "[%key:component::telegram_bot::config::step::user::data::proxy_url%]"
|
||||
},
|
||||
"data_description": {
|
||||
"platform": "[%key:component::telegram_bot::config::step::user::data_description::platform%]",
|
||||
"proxy_url": "[%key:component::telegram_bot::config::step::user::data_description::proxy_url%]"
|
||||
}
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "Re-authenticate Telegram bot",
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::telegram_bot::config::step::user::data_description::api_key%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
|
||||
"invalid_proxy_url": "{proxy_url_error}",
|
||||
"no_url_available": "URL is required since you have not configured an external URL in Home Assistant",
|
||||
"invalid_url": "URL must start with https",
|
||||
"invalid_trusted_networks": "Invalid trusted network: {error_message}"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Configure Telegram bot",
|
||||
"data": {
|
||||
"parse_mode": "Parse mode"
|
||||
},
|
||||
"data_description": {
|
||||
"parse_mode": "Default parse mode for messages if not explicit in message data."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"config_subentries": {
|
||||
"allowed_chat_ids": {
|
||||
"initiate_flow": {
|
||||
"user": "Add allowed chat ID"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Add chat",
|
||||
"data": {
|
||||
"chat_id": "Chat ID"
|
||||
},
|
||||
"data_description": {
|
||||
"chat_id": "ID representing the user or group chat to which messages can be sent."
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"chat_not_found": "Chat not found"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "Chat already configured"
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"platforms": {
|
||||
"options": {
|
||||
"broadcast": "Broadcast",
|
||||
"polling": "Polling",
|
||||
"webhooks": "Webhooks"
|
||||
}
|
||||
},
|
||||
"parsers": {
|
||||
"options": {
|
||||
"markdown": "Markdown (Legacy)",
|
||||
"markdownv2": "MarkdownV2",
|
||||
"html": "HTML"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"send_message": {
|
||||
"name": "Send message",
|
||||
"description": "Sends a notification.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "Config entry ID",
|
||||
"description": "The config entry representing the Telegram bot to send the message."
|
||||
},
|
||||
"message": {
|
||||
"name": "Message",
|
||||
"description": "Message body of the notification."
|
||||
@@ -58,6 +177,10 @@
|
||||
"name": "Send photo",
|
||||
"description": "Sends a photo.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to send the photo."
|
||||
},
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
"description": "Remote path to an image."
|
||||
@@ -128,6 +251,10 @@
|
||||
"name": "Send sticker",
|
||||
"description": "Sends a sticker.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to send the sticker."
|
||||
},
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
"description": "Remote path to a static .webp or animated .tgs sticker."
|
||||
@@ -194,6 +321,10 @@
|
||||
"name": "Send animation",
|
||||
"description": "Sends an animation.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to send the animation."
|
||||
},
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
"description": "Remote path to a GIF or H.264/MPEG-4 AVC video without sound."
|
||||
@@ -264,6 +395,10 @@
|
||||
"name": "Send video",
|
||||
"description": "Sends a video.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to send the video."
|
||||
},
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
"description": "Remote path to a video."
|
||||
@@ -334,6 +469,10 @@
|
||||
"name": "Send voice",
|
||||
"description": "Sends a voice message.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to send the voice message."
|
||||
},
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
"description": "Remote path to a voice message."
|
||||
@@ -400,6 +539,10 @@
|
||||
"name": "Send document",
|
||||
"description": "Sends a document.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to send the document."
|
||||
},
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
"description": "Remote path to a document."
|
||||
@@ -470,6 +613,10 @@
|
||||
"name": "Send location",
|
||||
"description": "Sends a location.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to send the location."
|
||||
},
|
||||
"latitude": {
|
||||
"name": "[%key:common::config_flow::data::latitude%]",
|
||||
"description": "The latitude to send."
|
||||
@@ -516,6 +663,10 @@
|
||||
"name": "Send poll",
|
||||
"description": "Sends a poll.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to send the poll."
|
||||
},
|
||||
"target": {
|
||||
"name": "Target",
|
||||
"description": "[%key:component::telegram_bot::services::send_location::fields::target::description%]"
|
||||
@@ -566,6 +717,10 @@
|
||||
"name": "Edit message",
|
||||
"description": "Edits a previously sent message.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to edit the message."
|
||||
},
|
||||
"message_id": {
|
||||
"name": "Message ID",
|
||||
"description": "ID of the message to edit."
|
||||
@@ -600,6 +755,10 @@
|
||||
"name": "Edit caption",
|
||||
"description": "Edits the caption of a previously sent message.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to edit the caption."
|
||||
},
|
||||
"message_id": {
|
||||
"name": "[%key:component::telegram_bot::services::edit_message::fields::message_id::name%]",
|
||||
"description": "[%key:component::telegram_bot::services::edit_message::fields::message_id::description%]"
|
||||
@@ -622,6 +781,10 @@
|
||||
"name": "Edit reply markup",
|
||||
"description": "Edits the inline keyboard of a previously sent message.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to edit the reply markup."
|
||||
},
|
||||
"message_id": {
|
||||
"name": "[%key:component::telegram_bot::services::edit_message::fields::message_id::name%]",
|
||||
"description": "[%key:component::telegram_bot::services::edit_message::fields::message_id::description%]"
|
||||
@@ -640,6 +803,10 @@
|
||||
"name": "Answer callback query",
|
||||
"description": "Responds to a callback query originated by clicking on an online keyboard button. The answer will be displayed to the user as a notification at the top of the chat screen or as an alert.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to answer the callback query."
|
||||
},
|
||||
"message": {
|
||||
"name": "Message",
|
||||
"description": "Unformatted text message body of the notification."
|
||||
@@ -662,6 +829,10 @@
|
||||
"name": "Delete message",
|
||||
"description": "Deletes a previously sent message.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]",
|
||||
"description": "The config entry representing the Telegram bot to delete the message."
|
||||
},
|
||||
"message_id": {
|
||||
"name": "[%key:component::telegram_bot::services::edit_message::fields::message_id::name%]",
|
||||
"description": "ID of the message to delete."
|
||||
@@ -673,7 +844,30 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"multiple_config_entry": {
|
||||
"message": "Multiple config entries found. Please specify the Telegram bot to use in the Config entry ID field."
|
||||
},
|
||||
"missing_config_entry": {
|
||||
"message": "No config entries found or setup failed. Please set up the Telegram Bot first."
|
||||
},
|
||||
"missing_allowed_chat_ids": {
|
||||
"message": "No allowed chat IDs found. Please add allowed chat IDs for {bot_name}."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml": {
|
||||
"title": "The {integration_title} YAML configuration is being removed",
|
||||
"description": "Configuring {integration_title} using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue."
|
||||
},
|
||||
"deprecated_yaml_import_issue_has_more_platforms": {
|
||||
"title": "The {integration_title} YAML configuration is being removed",
|
||||
"description": "Configuring {integration_title} using YAML is being removed.\n\nThe last entry of your existing YAML configuration ({telegram_bot}) has been imported into the UI automatically.\n\nRemove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue. The other Telegram bots will need to be configured manually in the UI."
|
||||
},
|
||||
"deprecated_yaml_import_issue_error": {
|
||||
"title": "YAML import failed due to invalid {error_field}",
|
||||
"description": "Configuring {integration_title} using YAML is being removed but there was an error while importing your existing configuration ({telegram_bot}): {error_message}.\nSetup will not proceed.\n\nVerify that your {telegram_bot} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually."
|
||||
},
|
||||
"proxy_params_auth_deprecation": {
|
||||
"title": "{telegram_bot}: Proxy authentication should be moved to the URL",
|
||||
"description": "Authentication details for the the proxy configured in the {telegram_bot} integration should be moved into the {proxy_url} instead. Please update your configuration and restart Home Assistant to fix this issue.\n\nThe {proxy_params} config key will be removed in a future release."
|
||||
|
@@ -2,20 +2,23 @@
|
||||
|
||||
import datetime as dt
|
||||
from http import HTTPStatus
|
||||
from ipaddress import ip_address
|
||||
from ipaddress import IPv4Network, ip_address
|
||||
import logging
|
||||
import secrets
|
||||
import string
|
||||
|
||||
from telegram import Update
|
||||
from telegram.error import TimedOut
|
||||
from telegram.ext import Application, TypeHandler
|
||||
from telegram import Bot, Update
|
||||
from telegram.error import NetworkError, TimedOut
|
||||
from telegram.ext import ApplicationBuilder, TypeHandler
|
||||
|
||||
from homeassistant.components.http import HomeAssistantView
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.network import get_url
|
||||
|
||||
from . import CONF_TRUSTED_NETWORKS, CONF_URL, BaseTelegramBotEntity
|
||||
from .bot import BaseTelegramBot, TelegramBotConfigEntry
|
||||
from .const import CONF_TRUSTED_NETWORKS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -24,7 +27,9 @@ REMOVE_WEBHOOK_URL = ""
|
||||
SECRET_TOKEN_LENGTH = 32
|
||||
|
||||
|
||||
async def async_setup_platform(hass, bot, config):
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant, bot: Bot, config: TelegramBotConfigEntry
|
||||
) -> BaseTelegramBot | None:
|
||||
"""Set up the Telegram webhooks platform."""
|
||||
|
||||
# Generate an ephemeral secret token
|
||||
@@ -33,46 +38,56 @@ async def async_setup_platform(hass, bot, config):
|
||||
|
||||
pushbot = PushBot(hass, bot, config, secret_token)
|
||||
|
||||
if not pushbot.webhook_url.startswith("https"):
|
||||
_LOGGER.error("Invalid telegram webhook %s must be https", pushbot.webhook_url)
|
||||
return False
|
||||
|
||||
await pushbot.start_application()
|
||||
webhook_registered = await pushbot.register_webhook()
|
||||
if not webhook_registered:
|
||||
return False
|
||||
raise ConfigEntryNotReady("Failed to register webhook with Telegram")
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, pushbot.stop_application)
|
||||
hass.http.register_view(
|
||||
PushBotView(
|
||||
hass,
|
||||
bot,
|
||||
pushbot.application,
|
||||
config[CONF_TRUSTED_NETWORKS],
|
||||
_get_trusted_networks(config),
|
||||
secret_token,
|
||||
)
|
||||
)
|
||||
return True
|
||||
return pushbot
|
||||
|
||||
|
||||
class PushBot(BaseTelegramBotEntity):
|
||||
def _get_trusted_networks(config: TelegramBotConfigEntry) -> list[IPv4Network]:
|
||||
trusted_networks_str: list[str] = config.data[CONF_TRUSTED_NETWORKS]
|
||||
return [IPv4Network(trusted_network) for trusted_network in trusted_networks_str]
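
Sketch only: `PushBotView` below uses the networks returned here to gate incoming webhook requests, roughly equivalent to the standalone check sketched here (function name and example addresses are illustrative).

```python
from ipaddress import IPv4Network, ip_address


def is_trusted(remote_addr: str, trusted_networks: list[IPv4Network]) -> bool:
    """Return True if the request's source address is inside a trusted network."""
    return any(ip_address(remote_addr) in network for network in trusted_networks)


# Example with the Telegram default ranges:
defaults = [IPv4Network("149.154.160.0/20"), IPv4Network("91.108.4.0/22")]
print(is_trusted("149.154.167.220", defaults))  # True
print(is_trusted("10.0.0.1", defaults))         # False
```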
|
||||
|
||||
|
||||
class PushBot(BaseTelegramBot):
|
||||
"""Handles all the push/webhook logic and passes telegram updates to `self.handle_update`."""
|
||||
|
||||
def __init__(self, hass, bot, config, secret_token):
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
bot: Bot,
|
||||
config: TelegramBotConfigEntry,
|
||||
secret_token: str,
|
||||
) -> None:
|
||||
"""Create Application before calling super()."""
|
||||
self.bot = bot
|
||||
self.trusted_networks = config[CONF_TRUSTED_NETWORKS]
|
||||
self.trusted_networks = _get_trusted_networks(config)
|
||||
self.secret_token = secret_token
|
||||
# Dumb Application that just gets our updates to our handler callback (self.handle_update)
|
||||
self.application = Application.builder().bot(bot).updater(None).build()
|
||||
self.application = ApplicationBuilder().bot(bot).updater(None).build()
|
||||
self.application.add_handler(TypeHandler(Update, self.handle_update))
|
||||
super().__init__(hass, config)
|
||||
|
||||
self.base_url = config.get(CONF_URL) or get_url(
|
||||
self.base_url = config.data.get(CONF_URL) or get_url(
|
||||
hass, require_ssl=True, allow_internal=False
|
||||
)
|
||||
self.webhook_url = f"{self.base_url}{TELEGRAM_WEBHOOK_URL}"
|
||||
|
||||
async def shutdown(self) -> None:
|
||||
"""Shutdown the app."""
|
||||
await self.stop_application()
|
||||
|
||||
async def _try_to_set_webhook(self):
|
||||
_LOGGER.debug("Registering webhook URL: %s", self.webhook_url)
|
||||
retry_num = 0
|
||||
@@ -127,7 +142,10 @@ class PushBot(BaseTelegramBotEntity):
|
||||
async def deregister_webhook(self):
|
||||
"""Query telegram and deregister the URL for our webhook."""
|
||||
_LOGGER.debug("Deregistering webhook URL")
|
||||
await self.bot.delete_webhook()
|
||||
try:
|
||||
await self.bot.delete_webhook()
|
||||
except NetworkError:
|
||||
_LOGGER.error("Failed to deregister webhook URL")
|
||||
|
||||
|
||||
class PushBotView(HomeAssistantView):
|
||||
@@ -137,7 +155,14 @@ class PushBotView(HomeAssistantView):
|
||||
url = TELEGRAM_WEBHOOK_URL
|
||||
name = "telegram_webhooks"
|
||||
|
||||
def __init__(self, hass, bot, application, trusted_networks, secret_token):
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
bot: Bot,
|
||||
application,
|
||||
trusted_networks: list[IPv4Network],
|
||||
secret_token: str,
|
||||
) -> None:
|
||||
"""Initialize by storing stuff needed for setting up our webhook endpoint."""
|
||||
self.hass = hass
|
||||
self.bot = bot
|
||||
|
@@ -10,7 +10,7 @@
|
||||
"tensorflow==2.5.0",
|
||||
"tf-models-official==2.5.0",
|
||||
"pycocotools==2.0.6",
|
||||
"numpy==2.2.2",
|
||||
"numpy==2.2.6",
|
||||
"Pillow==11.2.1"
|
||||
]
|
||||
}
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"integration_type": "helper",
|
||||
"iot_class": "calculated",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["numpy==2.2.2"]
|
||||
"requirements": ["numpy==2.2.6"]
|
||||
}
|
||||
|
@@ -94,6 +94,7 @@ from .const import (
|
||||
CONF_DATA_COLLECTION_OPTED_IN,
|
||||
CONF_INSTALLER_MODE,
|
||||
CONF_INTEGRATION_CREATED_ADDON,
|
||||
CONF_KEEP_OLD_DEVICES,
|
||||
CONF_LR_S2_ACCESS_CONTROL_KEY,
|
||||
CONF_LR_S2_AUTHENTICATED_KEY,
|
||||
CONF_NETWORK_KEY,
|
||||
@@ -405,9 +406,10 @@ class DriverEvents:
|
||||
|
||||
# Devices that are in the device registry that are not known by the controller
|
||||
# can be removed
|
||||
for device in stored_devices:
|
||||
if device not in known_devices and device not in provisioned_devices:
|
||||
self.dev_reg.async_remove_device(device.id)
|
||||
if not self.config_entry.data.get(CONF_KEEP_OLD_DEVICES):
|
||||
for device in stored_devices:
|
||||
if device not in known_devices and device not in provisioned_devices:
|
||||
self.dev_reg.async_remove_device(device.id)
|
||||
|
||||
# run discovery on controller node
|
||||
if controller.own_node:
|
||||
|
@@ -56,6 +56,7 @@ from .const import (
|
||||
CONF_ADDON_S2_AUTHENTICATED_KEY,
|
||||
CONF_ADDON_S2_UNAUTHENTICATED_KEY,
|
||||
CONF_INTEGRATION_CREATED_ADDON,
|
||||
CONF_KEEP_OLD_DEVICES,
|
||||
CONF_LR_S2_ACCESS_CONTROL_KEY,
|
||||
CONF_LR_S2_AUTHENTICATED_KEY,
|
||||
CONF_S0_LEGACY_KEY,
|
||||
@@ -1383,9 +1384,20 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry = self._reconfigure_config_entry
|
||||
assert config_entry is not None
|
||||
|
||||
# Make sure we keep the old devices
|
||||
# so that user customizations are not lost,
|
||||
# when loading the config entry.
|
||||
self.hass.config_entries.async_update_entry(
|
||||
config_entry, data=config_entry.data | {CONF_KEEP_OLD_DEVICES: True}
|
||||
)
|
||||
|
||||
# Reload the config entry to reconnect the client after the addon restart
|
||||
await self.hass.config_entries.async_reload(config_entry.entry_id)
|
||||
|
||||
data = config_entry.data.copy()
|
||||
data.pop(CONF_KEEP_OLD_DEVICES, None)
|
||||
self.hass.config_entries.async_update_entry(config_entry, data=data)
|
||||
|
||||
@callback
|
||||
def forward_progress(event: dict) -> None:
|
||||
"""Forward progress events to frontend."""
|
||||
@@ -1436,6 +1448,15 @@ class ZWaveJSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry, unique_id=str(version_info.home_id)
|
||||
)
|
||||
await self.hass.config_entries.async_reload(config_entry.entry_id)
|
||||
|
||||
# Reload the config entry two times to clean up
|
||||
# the stale device entry.
|
||||
# Since both the old and the new controller have the same node id,
|
||||
# but different hardware identifiers, the integration
|
||||
# will create a new device for the new controller, on the first reload,
|
||||
# but not immediately remove the old device.
|
||||
await self.hass.config_entries.async_reload(config_entry.entry_id)
|
||||
|
||||
finally:
|
||||
for unsub in unsubs:
|
||||
unsub()
|
||||
|
@@ -27,6 +27,7 @@ CONF_ADDON_LR_S2_ACCESS_CONTROL_KEY = "lr_s2_access_control_key"
|
||||
CONF_ADDON_LR_S2_AUTHENTICATED_KEY = "lr_s2_authenticated_key"
|
||||
CONF_INSTALLER_MODE = "installer_mode"
|
||||
CONF_INTEGRATION_CREATED_ADDON = "integration_created_addon"
|
||||
CONF_KEEP_OLD_DEVICES = "keep_old_devices"
|
||||
CONF_NETWORK_KEY = "network_key"
|
||||
CONF_S0_LEGACY_KEY = "s0_legacy_key"
|
||||
CONF_S2_ACCESS_CONTROL_KEY = "s2_access_control_key"
|
||||
|
1
homeassistant/generated/config_flows.py
generated
@@ -634,6 +634,7 @@ FLOWS = {
|
||||
"tautulli",
|
||||
"technove",
|
||||
"tedee",
|
||||
"telegram_bot",
|
||||
"tellduslive",
|
||||
"tesla_fleet",
|
||||
"tesla_wall_connector",
|
||||
|
@@ -6578,7 +6578,7 @@
|
||||
},
|
||||
"telegram_bot": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "Telegram bot"
|
||||
}
|
||||
|
@@ -7,7 +7,7 @@ aiohasupervisor==0.3.1
|
||||
aiohttp-asyncmdnsresolver==0.1.1
|
||||
aiohttp-fast-zlib==0.2.3
|
||||
aiohttp==3.12.6
|
||||
aiohttp_cors==0.7.0
|
||||
aiohttp_cors==0.8.1
|
||||
aiousbwatcher==1.1.1
|
||||
aiozoneinfo==0.2.3
|
||||
annotatedyaml==0.4.5
|
||||
@@ -15,11 +15,11 @@ astral==2.2
|
||||
async-interrupt==1.2.2
|
||||
async-upnp-client==0.44.0
|
||||
atomicwrites-homeassistant==1.4.1
|
||||
attrs==25.1.0
|
||||
attrs==25.3.0
|
||||
audioop-lts==0.2.1
|
||||
av==13.1.0
|
||||
awesomeversion==24.6.0
|
||||
bcrypt==4.2.0
|
||||
bcrypt==4.3.0
|
||||
bleak-retry-connector==3.9.0
|
||||
bleak==0.22.3
|
||||
bluetooth-adapters==0.21.4
|
||||
@@ -29,7 +29,7 @@ cached-ipaddress==0.10.0
|
||||
certifi>=2021.5.30
|
||||
ciso8601==2.3.2
|
||||
cronsim==2.6
|
||||
cryptography==45.0.1
|
||||
cryptography==45.0.3
|
||||
dbus-fast==2.43.0
|
||||
fnv-hash-fast==1.5.0
|
||||
go2rtc-client==0.1.3b0
|
||||
@@ -45,7 +45,7 @@ ifaddr==0.2.0
|
||||
Jinja2==3.1.6
|
||||
lru-dict==1.3.0
|
||||
mutagen==1.47.0
|
||||
numpy==2.2.2
|
||||
numpy==2.2.6
|
||||
orjson==3.10.18
|
||||
packaging>=23.1
|
||||
paho-mqtt==2.1.0
|
||||
@@ -59,7 +59,7 @@ pyOpenSSL==25.1.0
|
||||
pyserial==3.5
|
||||
pyspeex-noise==1.0.2
|
||||
python-slugify==8.0.4
|
||||
PyTurboJPEG==1.7.5
|
||||
PyTurboJPEG==1.8.0
|
||||
PyYAML==6.0.2
|
||||
requests==2.32.3
|
||||
securetar==2025.2.1
|
||||
@@ -119,7 +119,7 @@ httpcore==1.0.9
|
||||
hyperframe>=5.2.0
|
||||
|
||||
# Ensure we run compatible with musllinux build env
|
||||
numpy==2.2.2
|
||||
numpy==2.2.6
|
||||
pandas~=2.2.3
|
||||
|
||||
# Constrain multidict to avoid typing issues
|
||||
@@ -130,7 +130,7 @@ multidict>=6.0.2
|
||||
backoff>=2.0
|
||||
|
||||
# ensure pydantic version does not float since it might have breaking changes
|
||||
pydantic==2.11.3
|
||||
pydantic==2.11.5
|
||||
|
||||
# Required for Python 3.12.4 compatibility (#119223).
|
||||
mashumaro>=3.13.1
|
||||
|
@@ -233,6 +233,11 @@ class HassImportsFormatChecker(BaseChecker):
|
||||
"hass-import-constant-alias",
|
||||
"Used when a constant should be imported as an alias",
|
||||
),
|
||||
"W7427": (
|
||||
"`%s` alias is unnecessary for `%s`",
|
||||
"hass-import-constant-unnecessary-alias",
|
||||
"Used when a constant alias is unnecessary",
|
||||
),
|
||||
}
|
||||
options = ()
|
||||
|
||||
@@ -274,16 +279,24 @@ class HassImportsFormatChecker(BaseChecker):
|
||||
self, current_package: str, node: nodes.ImportFrom
|
||||
) -> None:
|
||||
"""Check for improper 'from ._ import _' invocations."""
|
||||
if node.level <= 1 or (
|
||||
not current_package.startswith("homeassistant.components.")
|
||||
and not current_package.startswith("tests.components.")
|
||||
if not current_package.startswith(
|
||||
("homeassistant.components.", "tests.components.")
|
||||
):
|
||||
return
|
||||
|
||||
split_package = current_package.split(".")
|
||||
current_component = split_package[2]
|
||||
|
||||
self._check_for_constant_alias(node, current_component, current_component)
|
||||
|
||||
if node.level <= 1:
|
||||
# No need to check relative import
|
||||
return
|
||||
|
||||
if not node.modname and len(split_package) == node.level + 1:
|
||||
for name in node.names:
|
||||
# Allow relative import to component root
|
||||
if name[0] != split_package[2]:
|
||||
if name[0] != current_component:
|
||||
self.add_message("hass-absolute-import", node=node)
|
||||
return
|
||||
return
|
||||
@@ -298,6 +311,15 @@ class HassImportsFormatChecker(BaseChecker):
|
||||
) -> bool:
|
||||
"""Check for hass-import-constant-alias."""
|
||||
if current_component == imported_component:
|
||||
# Check for `from homeassistant.components.self import DOMAIN as XYZ`
|
||||
for name, alias in node.names:
|
||||
if name == "DOMAIN" and (alias is not None and alias != "DOMAIN"):
|
||||
self.add_message(
|
||||
"hass-import-constant-unnecessary-alias",
|
||||
node=node,
|
||||
args=(alias, "DOMAIN"),
|
||||
)
|
||||
return False
|
||||
return True
|
||||
|
||||
# Check for `from homeassistant.components.other import DOMAIN`
|
||||
|
@@ -29,18 +29,18 @@ dependencies = [
|
||||
# Lib can be removed with 2025.11
|
||||
"aiohasupervisor==0.3.1",
|
||||
"aiohttp==3.12.6",
|
||||
"aiohttp_cors==0.7.0",
|
||||
"aiohttp_cors==0.8.1",
|
||||
"aiohttp-fast-zlib==0.2.3",
|
||||
"aiohttp-asyncmdnsresolver==0.1.1",
|
||||
"aiozoneinfo==0.2.3",
|
||||
"annotatedyaml==0.4.5",
|
||||
"astral==2.2",
|
||||
"async-interrupt==1.2.2",
|
||||
"attrs==25.1.0",
|
||||
"attrs==25.3.0",
|
||||
"atomicwrites-homeassistant==1.4.1",
|
||||
"audioop-lts==0.2.1",
|
||||
"awesomeversion==24.6.0",
|
||||
"bcrypt==4.2.0",
|
||||
"bcrypt==4.3.0",
|
||||
"certifi>=2021.5.30",
|
||||
"ciso8601==2.3.2",
|
||||
"cronsim==2.6",
|
||||
@@ -79,10 +79,10 @@ dependencies = [
|
||||
# onboarding->cloud->alexa->camera->stream->numpy. Onboarding needs
|
||||
# to be setup in stage 0, but we don't want to also promote cloud with all its
|
||||
# dependencies to stage 0.
|
||||
"numpy==2.2.2",
|
||||
"numpy==2.2.6",
|
||||
"PyJWT==2.10.1",
|
||||
# PyJWT has loose dependency. We want the latest one.
|
||||
"cryptography==45.0.1",
|
||||
"cryptography==45.0.3",
|
||||
"Pillow==11.2.1",
|
||||
"propcache==0.3.1",
|
||||
"pyOpenSSL==25.1.0",
|
||||
@@ -104,7 +104,7 @@ dependencies = [
|
||||
# onboarding->cloud->camera->pyturbojpeg. Onboarding needs
|
||||
# to be setup in stage 0, but we don't want to also promote cloud with all its
|
||||
# dependencies to stage 0.
|
||||
"PyTurboJPEG==1.7.5",
|
||||
"PyTurboJPEG==1.8.0",
|
||||
"PyYAML==6.0.2",
|
||||
"requests==2.32.3",
|
||||
"securetar==2025.2.1",
|
||||
|
12
requirements.txt
generated
@@ -6,18 +6,18 @@
|
||||
aiodns==3.4.0
|
||||
aiohasupervisor==0.3.1
|
||||
aiohttp==3.12.6
|
||||
aiohttp_cors==0.7.0
|
||||
aiohttp_cors==0.8.1
|
||||
aiohttp-fast-zlib==0.2.3
|
||||
aiohttp-asyncmdnsresolver==0.1.1
|
||||
aiozoneinfo==0.2.3
|
||||
annotatedyaml==0.4.5
|
||||
astral==2.2
|
||||
async-interrupt==1.2.2
|
||||
attrs==25.1.0
|
||||
attrs==25.3.0
|
||||
atomicwrites-homeassistant==1.4.1
|
||||
audioop-lts==0.2.1
|
||||
awesomeversion==24.6.0
|
||||
bcrypt==4.2.0
|
||||
bcrypt==4.3.0
|
||||
certifi>=2021.5.30
|
||||
ciso8601==2.3.2
|
||||
cronsim==2.6
|
||||
@@ -32,9 +32,9 @@ ifaddr==0.2.0
|
||||
Jinja2==3.1.6
|
||||
lru-dict==1.3.0
|
||||
mutagen==1.47.0
|
||||
numpy==2.2.2
|
||||
numpy==2.2.6
|
||||
PyJWT==2.10.1
|
||||
cryptography==45.0.1
|
||||
cryptography==45.0.3
|
||||
Pillow==11.2.1
|
||||
propcache==0.3.1
|
||||
pyOpenSSL==25.1.0
|
||||
@@ -44,7 +44,7 @@ psutil-home-assistant==0.0.1
|
||||
pymicro-vad==1.0.1
|
||||
pyspeex-noise==1.0.2
|
||||
python-slugify==8.0.4
|
||||
PyTurboJPEG==1.7.5
|
||||
PyTurboJPEG==1.8.0
|
||||
PyYAML==6.0.2
|
||||
requests==2.32.3
|
||||
securetar==2025.2.1
|
||||
|
10
requirements_all.txt
generated
@@ -94,7 +94,7 @@ PyTransportNSW==0.1.1
|
||||
|
||||
# homeassistant.components.camera
|
||||
# homeassistant.components.stream
|
||||
PyTurboJPEG==1.7.5
|
||||
PyTurboJPEG==1.8.0
|
||||
|
||||
# homeassistant.components.vicare
|
||||
PyViCare==2.44.0
|
||||
@@ -182,7 +182,7 @@ aioairzone-cloud==0.6.12
|
||||
aioairzone==1.0.0
|
||||
|
||||
# homeassistant.components.amazon_devices
|
||||
aioamazondevices==2.1.1
|
||||
aioamazondevices==3.0.4
|
||||
|
||||
# homeassistant.components.ambient_network
|
||||
# homeassistant.components.ambient_station
|
||||
@@ -280,7 +280,7 @@ aiohue==4.7.4
|
||||
aioimaplib==2.0.1
|
||||
|
||||
# homeassistant.components.immich
|
||||
aioimmich==0.7.0
|
||||
aioimmich==0.8.0
|
||||
|
||||
# homeassistant.components.apache_kafka
|
||||
aiokafka==0.10.0
|
||||
@@ -1203,7 +1203,7 @@ ibmiotf==0.3.4
|
||||
# homeassistant.components.local_calendar
|
||||
# homeassistant.components.local_todo
|
||||
# homeassistant.components.remote_calendar
|
||||
ical==9.2.5
|
||||
ical==10.0.0
|
||||
|
||||
# homeassistant.components.caldav
|
||||
icalendar==6.1.0
|
||||
@@ -1548,7 +1548,7 @@ numato-gpio==0.13.0
|
||||
# homeassistant.components.stream
|
||||
# homeassistant.components.tensorflow
|
||||
# homeassistant.components.trend
|
||||
numpy==2.2.2
|
||||
numpy==2.2.6
|
||||
|
||||
# homeassistant.components.nyt_games
|
||||
nyt_games==0.4.4
|
||||
|
@@ -8,46 +8,46 @@
|
||||
-c homeassistant/package_constraints.txt
|
||||
-r requirements_test_pre_commit.txt
|
||||
astroid==3.3.10
|
||||
coverage==7.6.12
|
||||
freezegun==1.5.1
|
||||
coverage==7.8.2
|
||||
freezegun==1.5.2
|
||||
go2rtc-client==0.1.3b0
|
||||
license-expression==30.4.1
|
||||
mock-open==1.4.0
|
||||
mypy-dev==1.16.0a8
|
||||
pre-commit==4.0.0
|
||||
pydantic==2.11.3
|
||||
mypy-dev==1.17.0a2
|
||||
pre-commit==4.2.0
|
||||
pydantic==2.11.5
|
||||
pylint==3.3.7
|
||||
pylint-per-file-ignores==1.4.0
|
||||
pipdeptree==2.26.1
|
||||
pytest-asyncio==0.26.0
|
||||
pytest-asyncio==1.0.0
|
||||
pytest-aiohttp==1.1.0
|
||||
pytest-cov==6.0.0
|
||||
pytest-cov==6.1.1
|
||||
pytest-freezer==0.4.9
|
||||
pytest-github-actions-annotate-failures==0.3.0
|
||||
pytest-socket==0.7.0
|
||||
pytest-sugar==1.0.0
|
||||
pytest-timeout==2.3.1
|
||||
pytest-timeout==2.4.0
|
||||
pytest-unordered==0.6.1
|
||||
pytest-picked==0.5.1
|
||||
pytest-xdist==3.6.1
|
||||
pytest-xdist==3.7.0
|
||||
pytest==8.3.5
|
||||
requests-mock==1.12.1
|
||||
respx==0.22.0
|
||||
syrupy==4.8.1
|
||||
syrupy==4.9.1
|
||||
tqdm==4.67.1
|
||||
types-aiofiles==24.1.0.20250326
|
||||
types-aiofiles==24.1.0.20250516
|
||||
types-atomicwrites==1.4.5.1
|
||||
types-croniter==6.0.0.20250411
|
||||
types-caldav==1.3.0.20241107
|
||||
types-caldav==1.3.0.20250516
|
||||
types-chardet==0.1.5
|
||||
types-decorator==5.2.0.20250324
|
||||
types-pexpect==4.9.0.20241208
|
||||
types-protobuf==5.29.1.20250403
|
||||
types-psutil==7.0.0.20250401
|
||||
types-pexpect==4.9.0.20250516
|
||||
types-protobuf==6.30.2.20250516
|
||||
types-psutil==7.0.0.20250601
|
||||
types-pyserial==3.5.0.20250326
|
||||
types-python-dateutil==2.9.0.20241206
|
||||
types-python-dateutil==2.9.0.20250516
|
||||
types-python-slugify==8.0.2.20240310
|
||||
types-pytz==2025.2.0.20250326
|
||||
types-PyYAML==6.0.12.20250402
|
||||
types-pytz==2025.2.0.20250516
|
||||
types-PyYAML==6.0.12.20250516
|
||||
types-requests==2.31.0.3
|
||||
types-xmltodict==0.13.0.3
|
||||
|
10
requirements_test_all.txt
generated
@@ -88,7 +88,7 @@ PyTransportNSW==0.1.1
|
||||
|
||||
# homeassistant.components.camera
|
||||
# homeassistant.components.stream
|
||||
PyTurboJPEG==1.7.5
|
||||
PyTurboJPEG==1.8.0
|
||||
|
||||
# homeassistant.components.vicare
|
||||
PyViCare==2.44.0
|
||||
@@ -170,7 +170,7 @@ aioairzone-cloud==0.6.12
|
||||
aioairzone==1.0.0
|
||||
|
||||
# homeassistant.components.amazon_devices
|
||||
aioamazondevices==2.1.1
|
||||
aioamazondevices==3.0.4
|
||||
|
||||
# homeassistant.components.ambient_network
|
||||
# homeassistant.components.ambient_station
|
||||
@@ -265,7 +265,7 @@ aiohue==4.7.4
|
||||
aioimaplib==2.0.1
|
||||
|
||||
# homeassistant.components.immich
|
||||
aioimmich==0.7.0
|
||||
aioimmich==0.8.0
|
||||
|
||||
# homeassistant.components.apache_kafka
|
||||
aiokafka==0.10.0
|
||||
@@ -1037,7 +1037,7 @@ ibeacon-ble==1.2.0
|
||||
# homeassistant.components.local_calendar
|
||||
# homeassistant.components.local_todo
|
||||
# homeassistant.components.remote_calendar
|
||||
ical==9.2.5
|
||||
ical==10.0.0
|
||||
|
||||
# homeassistant.components.caldav
|
||||
icalendar==6.1.0
|
||||
@@ -1316,7 +1316,7 @@ numato-gpio==0.13.0
|
||||
# homeassistant.components.stream
|
||||
# homeassistant.components.tensorflow
|
||||
# homeassistant.components.trend
|
||||
numpy==2.2.2
|
||||
numpy==2.2.6
|
||||
|
||||
# homeassistant.components.nyt_games
|
||||
nyt_games==0.4.4
|
||||
|
@@ -144,7 +144,7 @@ httpcore==1.0.9
|
||||
hyperframe>=5.2.0
|
||||
|
||||
# Ensure we run compatible with musllinux build env
|
||||
numpy==2.2.2
|
||||
numpy==2.2.6
|
||||
pandas~=2.2.3
|
||||
|
||||
# Constrain multidict to avoid typing issues
|
||||
@@ -155,7 +155,7 @@ multidict>=6.0.2
|
||||
backoff>=2.0
|
||||
|
||||
# ensure pydantic version does not float since it might have breaking changes
|
||||
pydantic==2.11.3
|
||||
pydantic==2.11.5
|
||||
|
||||
# Required for Python 3.12.4 compatibility (#119223).
|
||||
mashumaro>=3.13.1
|
||||
|
2
script/hassfest/docker/Dockerfile
generated
@@ -25,7 +25,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.7.1,source=/uv,target=/bin/uv \
|
||||
-c /usr/src/homeassistant/homeassistant/package_constraints.txt \
|
||||
-r /usr/src/homeassistant/requirements.txt \
|
||||
stdlib-list==0.10.0 pipdeptree==2.26.1 tqdm==4.67.1 ruff==0.11.0 \
|
||||
PyTurboJPEG==1.7.5 go2rtc-client==0.1.3b0 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.5.28 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
|
||||
PyTurboJPEG==1.8.0 go2rtc-client==0.1.3b0 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.5.28 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
|
||||
|
||||
LABEL "name"="hassfest"
|
||||
LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"
|
||||
|
@@ -0,0 +1,74 @@
|
||||
# serializer version: 1
|
||||
# name: test_device_diagnostics
|
||||
dict({
|
||||
'account name': 'Echo Test',
|
||||
'bluetooth state': True,
|
||||
'capabilities': list([
|
||||
'AUDIO_PLAYER',
|
||||
'MICROPHONE',
|
||||
]),
|
||||
'device cluster members': list([
|
||||
'echo_test_serial_number',
|
||||
]),
|
||||
'device family': 'mine',
|
||||
'device type': 'echo',
|
||||
'do not disturb': False,
|
||||
'online': True,
|
||||
'response style': None,
|
||||
'serial number': 'echo_test_serial_number',
|
||||
'software version': 'echo_test_software_version',
|
||||
})
|
||||
# ---
|
||||
# name: test_entry_diagnostics
|
||||
dict({
|
||||
'device_info': dict({
|
||||
'devices': list([
|
||||
dict({
|
||||
'account name': 'Echo Test',
|
||||
'bluetooth state': True,
|
||||
'capabilities': list([
|
||||
'AUDIO_PLAYER',
|
||||
'MICROPHONE',
|
||||
]),
|
||||
'device cluster members': list([
|
||||
'echo_test_serial_number',
|
||||
]),
|
||||
'device family': 'mine',
|
||||
'device type': 'echo',
|
||||
'do not disturb': False,
|
||||
'online': True,
|
||||
'response style': None,
|
||||
'serial number': 'echo_test_serial_number',
|
||||
'software version': 'echo_test_software_version',
|
||||
}),
|
||||
]),
|
||||
'last_exception': 'None',
|
||||
'last_update success': True,
|
||||
}),
|
||||
'entry': dict({
|
||||
'data': dict({
|
||||
'country': 'IT',
|
||||
'login_data': dict({
|
||||
'session': 'test-session',
|
||||
}),
|
||||
'password': '**REDACTED**',
|
||||
'username': '**REDACTED**',
|
||||
}),
|
||||
'disabled_by': None,
|
||||
'discovery_keys': dict({
|
||||
}),
|
||||
'domain': 'amazon_devices',
|
||||
'minor_version': 1,
|
||||
'options': dict({
|
||||
}),
|
||||
'pref_disable_new_entities': False,
|
||||
'pref_disable_polling': False,
|
||||
'source': 'user',
|
||||
'subentries': list([
|
||||
]),
|
||||
'title': '**REDACTED**',
|
||||
'unique_id': 'fake_email@gmail.com',
|
||||
'version': 1,
|
||||
}),
|
||||
})
|
||||
# ---
|
70
tests/components/amazon_devices/test_diagnostics.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""Tests for Amazon Devices diagnostics platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
from syrupy.filters import props
|
||||
|
||||
from homeassistant.components.amazon_devices.const import DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from . import setup_integration
|
||||
from .const import TEST_SERIAL_NUMBER
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.components.diagnostics import (
|
||||
get_diagnostics_for_config_entry,
|
||||
get_diagnostics_for_device,
|
||||
)
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
|
||||
async def test_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
mock_amazon_devices_client: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
hass_client: ClientSessionGenerator,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test Amazon config entry diagnostics."""
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
assert await get_diagnostics_for_config_entry(
|
||||
hass, hass_client, mock_config_entry
|
||||
) == snapshot(
|
||||
exclude=props(
|
||||
"entry_id",
|
||||
"created_at",
|
||||
"modified_at",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
async def test_device_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
mock_amazon_devices_client: AsyncMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
hass_client: ClientSessionGenerator,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test Amazon device diagnostics."""
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, TEST_SERIAL_NUMBER)}
|
||||
)
|
||||
assert device, repr(device_registry.devices)
|
||||
|
||||
assert await get_diagnostics_for_device(
|
||||
hass, hass_client, mock_config_entry, device
|
||||
) == snapshot(
|
||||
exclude=props(
|
||||
"entry_id",
|
||||
"created_at",
|
||||
"modified_at",
|
||||
)
|
||||
)
|
@@ -222,3 +222,16 @@
|
||||
'version': '1970.1.0',
|
||||
})
|
||||
# ---
|
||||
# name: test_submitting_legacy_integrations
|
||||
dict({
|
||||
'certificate': False,
|
||||
'custom_integrations': list([
|
||||
]),
|
||||
'installation_type': 'Home Assistant Tests',
|
||||
'integrations': list([
|
||||
'legacy_binary_sensor',
|
||||
]),
|
||||
'uuid': 'abcdefg',
|
||||
'version': '1970.1.0',
|
||||
})
|
||||
# ---
|
||||
|
@@ -920,3 +920,49 @@ async def test_not_check_config_entries_if_yaml(
|
||||
assert submitted_data["integrations"] == ["default_config"]
|
||||
assert submitted_data == logged_data
|
||||
assert snapshot == submitted_data
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("installation_type_mock", "supervisor_client")
|
||||
async def test_submitting_legacy_integrations(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test submitting legacy integrations."""
|
||||
hass.http = Mock(ssl_certificate=None)
|
||||
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
|
||||
analytics = Analytics(hass)
|
||||
|
||||
await analytics.save_preferences({ATTR_BASE: True, ATTR_USAGE: True})
|
||||
assert analytics.preferences[ATTR_BASE]
|
||||
assert analytics.preferences[ATTR_USAGE]
|
||||
hass.config.components = ["binary_sensor"]
|
||||
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.analytics.analytics.async_get_integrations",
|
||||
return_value={
|
||||
"default_config": mock_integration(
|
||||
hass,
|
||||
MockModule(
|
||||
"legacy_binary_sensor",
|
||||
async_setup=AsyncMock(return_value=True),
|
||||
partial_manifest={"config_flow": False},
|
||||
),
|
||||
),
|
||||
},
|
||||
),
|
||||
patch(
|
||||
"homeassistant.config.async_hass_config_yaml",
|
||||
return_value={"binary_sensor": [{"platform": "legacy_binary_sensor"}]},
|
||||
),
|
||||
):
|
||||
await analytics.send_analytics()
|
||||
|
||||
logged_data = caplog.records[-1].args
|
||||
submitted_data = _last_call_payload(aioclient_mock)
|
||||
|
||||
assert submitted_data["integrations"] == ["legacy_binary_sensor"]
|
||||
assert submitted_data == logged_data
|
||||
assert snapshot == submitted_data
|
||||
|
@@ -1,7 +1,5 @@
|
||||
"""Test configuration for auth."""
|
||||
|
||||
from asyncio import AbstractEventLoop
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.typing import ClientSessionGenerator
|
||||
@@ -9,7 +7,6 @@ from tests.typing import ClientSessionGenerator
|
||||
|
||||
@pytest.fixture
|
||||
def aiohttp_client(
|
||||
event_loop: AbstractEventLoop,
|
||||
aiohttp_client: ClientSessionGenerator,
|
||||
socket_enabled: None,
|
||||
) -> ClientSessionGenerator:
|
||||
|
@@ -2,7 +2,7 @@
|
||||
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from homeassistant.components.decora import DOMAIN as DECORA_DOMAIN
|
||||
from homeassistant.components.decora import DOMAIN
|
||||
from homeassistant.components.light import DOMAIN as PLATFORM_DOMAIN
|
||||
from homeassistant.const import CONF_PLATFORM
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
@@ -22,7 +22,7 @@ async def test_repair_issue_is_created(
|
||||
{
|
||||
PLATFORM_DOMAIN: [
|
||||
{
|
||||
CONF_PLATFORM: DECORA_DOMAIN,
|
||||
CONF_PLATFORM: DOMAIN,
|
||||
}
|
||||
],
|
||||
},
|
||||
@@ -30,5 +30,5 @@ async def test_repair_issue_is_created(
|
||||
await hass.async_block_till_done()
|
||||
assert (
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DECORA_DOMAIN}",
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
) in issue_registry.issues
|
||||
|
@@ -1,6 +1,5 @@
|
||||
"""The tests for the emulated Hue component."""
|
||||
|
||||
from asyncio import AbstractEventLoop
|
||||
from collections.abc import Generator
|
||||
from http import HTTPStatus
|
||||
import json
|
||||
@@ -38,7 +37,6 @@ class MockTransport:
|
||||
|
||||
@pytest.fixture
|
||||
def aiohttp_client(
|
||||
event_loop: AbstractEventLoop,
|
||||
aiohttp_client: ClientSessionGenerator,
|
||||
socket_enabled: None,
|
||||
) -> ClientSessionGenerator:
|
||||
|
@@ -1,6 +1,5 @@
|
||||
"""The tests for Home Assistant frontend."""
|
||||
|
||||
from asyncio import AbstractEventLoop
|
||||
from collections.abc import Generator
|
||||
from http import HTTPStatus
|
||||
from pathlib import Path
|
||||
@@ -95,7 +94,6 @@ async def frontend_themes(hass: HomeAssistant) -> None:
|
||||
|
||||
@pytest.fixture
|
||||
def aiohttp_client(
|
||||
event_loop: AbstractEventLoop,
|
||||
aiohttp_client: ClientSessionGenerator,
|
||||
socket_enabled: None,
|
||||
) -> ClientSessionGenerator:
|
||||
|
@@ -1,12 +1,15 @@
|
||||
"""The tests for the hddtemp platform."""
|
||||
|
||||
import socket
|
||||
from unittest.mock import patch
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.hddtemp import DOMAIN
|
||||
from homeassistant.components.sensor import DOMAIN as PLATFORM_DOMAIN
|
||||
from homeassistant.const import UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
VALID_CONFIG_MINIMAL = {"sensor": {"platform": "hddtemp"}}
|
||||
@@ -192,3 +195,17 @@ async def test_hddtemp_host_unreachable(hass: HomeAssistant, telnetmock) -> None
|
||||
assert await async_setup_component(hass, "sensor", VALID_CONFIG_HOST_UNREACHABLE)
|
||||
await hass.async_block_till_done()
|
||||
assert len(hass.states.async_all()) == 0
|
||||
|
||||
|
||||
@patch.dict("sys.modules", gsp=Mock())
|
||||
async def test_repair_issue_is_created(
|
||||
hass: HomeAssistant,
|
||||
issue_registry: ir.IssueRegistry,
|
||||
) -> None:
|
||||
"""Test repair issue is created."""
|
||||
assert await async_setup_component(hass, PLATFORM_DOMAIN, VALID_CONFIG_MINIMAL)
|
||||
await hass.async_block_till_done()
|
||||
assert (
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
) in issue_registry.issues
|
||||
|
@@ -1,6 +1,6 @@
|
||||
"""HomeKit session fixtures."""
|
||||
|
||||
from asyncio import AbstractEventLoop
|
||||
import asyncio
|
||||
from collections.abc import Generator
|
||||
from contextlib import suppress
|
||||
import os
|
||||
@@ -26,12 +26,13 @@ def iid_storage(hass: HomeAssistant) -> Generator[AccessoryIIDStorage]:
|
||||
|
||||
@pytest.fixture
|
||||
def run_driver(
|
||||
hass: HomeAssistant, event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage
|
||||
hass: HomeAssistant, iid_storage: AccessoryIIDStorage
|
||||
) -> Generator[HomeDriver]:
|
||||
"""Return a custom AccessoryDriver instance for HomeKit accessory init.
|
||||
|
||||
This mock does not mock async_stop, so the driver will not be stopped
|
||||
"""
|
||||
event_loop = asyncio.get_event_loop()
|
||||
with (
|
||||
patch("pyhap.accessory_driver.AsyncZeroconf"),
|
||||
patch("pyhap.accessory_driver.AccessoryEncoder"),
|
||||
@@ -55,9 +56,10 @@ def run_driver(
|
||||
|
||||
@pytest.fixture
|
||||
def hk_driver(
|
||||
hass: HomeAssistant, event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage
|
||||
hass: HomeAssistant, iid_storage: AccessoryIIDStorage
|
||||
) -> Generator[HomeDriver]:
|
||||
"""Return a custom AccessoryDriver instance for HomeKit accessory init."""
|
||||
event_loop = asyncio.get_event_loop()
|
||||
with (
|
||||
patch("pyhap.accessory_driver.AsyncZeroconf"),
|
||||
patch("pyhap.accessory_driver.AccessoryEncoder"),
|
||||
@@ -85,11 +87,11 @@ def hk_driver(
|
||||
@pytest.fixture
|
||||
def mock_hap(
|
||||
hass: HomeAssistant,
|
||||
event_loop: AbstractEventLoop,
|
||||
iid_storage: AccessoryIIDStorage,
|
||||
mock_zeroconf: MagicMock,
|
||||
) -> Generator[HomeDriver]:
|
||||
"""Return a custom AccessoryDriver instance for HomeKit accessory init."""
|
||||
event_loop = asyncio.get_event_loop()
|
||||
with (
|
||||
patch("pyhap.accessory_driver.AsyncZeroconf"),
|
||||
patch("pyhap.accessory_driver.AccessoryEncoder"),
|
||||
|
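The conftest changes above all drop the deprecated pytest-asyncio event_loop fixture argument; where the HomeKit driver fixtures still need a loop, they now resolve it inside the fixture body instead. A minimal sketch of the pattern, with a hypothetical fixture name:

    import asyncio

    import pytest


    @pytest.fixture
    def example_driver():
        """Hypothetical fixture that needs an event loop at setup time."""
        # Instead of taking `event_loop: AbstractEventLoop` as a parameter,
        # fetch the loop when the fixture runs, as the HomeKit fixtures do.
        event_loop = asyncio.get_event_loop()
        return event_loop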
@@ -1,7 +1,5 @@
|
||||
"""Test configuration for http."""
|
||||
|
||||
from asyncio import AbstractEventLoop
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.typing import ClientSessionGenerator
|
||||
@@ -9,7 +7,6 @@ from tests.typing import ClientSessionGenerator
|
||||
|
||||
@pytest.fixture
|
||||
def aiohttp_client(
|
||||
event_loop: AbstractEventLoop,
|
||||
aiohttp_client: ClientSessionGenerator,
|
||||
socket_enabled: None,
|
||||
) -> ClientSessionGenerator:
|
||||
|
@@ -1,6 +1,5 @@
|
||||
"""The tests for the image_processing component."""
|
||||
|
||||
from asyncio import AbstractEventLoop
|
||||
from collections.abc import Callable
|
||||
from unittest.mock import PropertyMock, patch
|
||||
|
||||
@@ -26,7 +25,6 @@ async def setup_homeassistant(hass: HomeAssistant):
|
||||
|
||||
@pytest.fixture
|
||||
def aiohttp_unused_port_factory(
|
||||
event_loop: AbstractEventLoop,
|
||||
unused_tcp_port_factory: Callable[[], int],
|
||||
socket_enabled: None,
|
||||
) -> Callable[[], int]:
|
||||
|
@@ -1,7 +1,6 @@
|
||||
"""Common fixtures for the Immich tests."""
|
||||
|
||||
from collections.abc import AsyncGenerator, Generator
|
||||
from datetime import datetime
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from aioimmich import ImmichAlbums, ImmichAssests, ImmichServer, ImmichUsers
|
||||
@@ -10,7 +9,7 @@ from aioimmich.server.models import (
|
||||
ImmichServerStatistics,
|
||||
ImmichServerStorage,
|
||||
)
|
||||
from aioimmich.users.models import AvatarColor, ImmichUser, UserStatus
|
||||
from aioimmich.users.models import ImmichUserObject
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.immich.const import DOMAIN
|
||||
@@ -78,36 +77,58 @@ def mock_immich_assets() -> AsyncMock:
|
||||
def mock_immich_server() -> AsyncMock:
|
||||
"""Mock the Immich server."""
|
||||
mock = AsyncMock(spec=ImmichServer)
|
||||
mock.async_get_about_info.return_value = ImmichServerAbout(
|
||||
"v1.132.3",
|
||||
"some_url",
|
||||
False,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
mock.async_get_about_info.return_value = ImmichServerAbout.from_dict(
|
||||
{
|
||||
"version": "v1.132.3",
|
||||
"versionUrl": "https://github.com/immich-app/immich/releases/tag/v1.132.3",
|
||||
"licensed": False,
|
||||
"build": "14709928600",
|
||||
"buildUrl": "https://github.com/immich-app/immich/actions/runs/14709928600",
|
||||
"buildImage": "v1.132.3",
|
||||
"buildImageUrl": "https://github.com/immich-app/immich/pkgs/container/immich-server",
|
||||
"repository": "immich-app/immich",
|
||||
"repositoryUrl": "https://github.com/immich-app/immich",
|
||||
"sourceRef": "v1.132.3",
|
||||
"sourceCommit": "02994883fe3f3972323bb6759d0170a4062f5236",
|
||||
"sourceUrl": "https://github.com/immich-app/immich/commit/02994883fe3f3972323bb6759d0170a4062f5236",
|
||||
"nodejs": "v22.14.0",
|
||||
"exiftool": "13.00",
|
||||
"ffmpeg": "7.0.2-7",
|
||||
"libvips": "8.16.1",
|
||||
"imagemagick": "7.1.1-47",
|
||||
}
|
||||
)
|
||||
mock.async_get_storage_info.return_value = ImmichServerStorage(
|
||||
"294.2 GiB",
|
||||
"142.9 GiB",
|
||||
"136.3 GiB",
|
||||
315926315008,
|
||||
153400434688,
|
||||
146402975744,
|
||||
48.56,
|
||||
mock.async_get_storage_info.return_value = ImmichServerStorage.from_dict(
|
||||
{
|
||||
"diskSize": "294.2 GiB",
|
||||
"diskUse": "142.9 GiB",
|
||||
"diskAvailable": "136.3 GiB",
|
||||
"diskSizeRaw": 315926315008,
|
||||
"diskUseRaw": 153400406016,
|
||||
"diskAvailableRaw": 146403004416,
|
||||
"diskUsagePercentage": 48.56,
|
||||
}
|
||||
)
|
||||
mock.async_get_server_statistics.return_value = ImmichServerStatistics(
|
||||
27038, 1836, 119525451912, 54291170551, 65234281361
|
||||
mock.async_get_server_statistics.return_value = ImmichServerStatistics.from_dict(
|
||||
{
|
||||
"photos": 27038,
|
||||
"videos": 1836,
|
||||
"usage": 119525451912,
|
||||
"usagePhotos": 54291170551,
|
||||
"usageVideos": 65234281361,
|
||||
"usageByUser": [
|
||||
{
|
||||
"userId": "e7ef5713-9dab-4bd4-b899-715b0ca4379e",
|
||||
"userName": "admin",
|
||||
"photos": 27038,
|
||||
"videos": 1836,
|
||||
"usage": 119525451912,
|
||||
"usagePhotos": 54291170551,
|
||||
"usageVideos": 65234281361,
|
||||
"quotaSizeInBytes": None,
|
||||
}
|
||||
],
|
||||
}
|
||||
)
|
||||
return mock
|
||||
|
||||
@@ -116,23 +137,26 @@ def mock_immich_server() -> AsyncMock:
|
||||
def mock_immich_user() -> AsyncMock:
|
||||
"""Mock the Immich server."""
|
||||
mock = AsyncMock(spec=ImmichUsers)
|
||||
mock.async_get_my_user.return_value = ImmichUser(
|
||||
"e7ef5713-9dab-4bd4-b899-715b0ca4379e",
|
||||
"user@immich.local",
|
||||
"user",
|
||||
"",
|
||||
AvatarColor.PRIMARY,
|
||||
datetime.fromisoformat("2025-05-11T10:07:46.866Z"),
|
||||
"user",
|
||||
False,
|
||||
True,
|
||||
datetime.fromisoformat("2025-05-11T10:07:46.866Z"),
|
||||
None,
|
||||
None,
|
||||
"",
|
||||
None,
|
||||
None,
|
||||
UserStatus.ACTIVE,
|
||||
mock.async_get_my_user.return_value = ImmichUserObject.from_dict(
|
||||
{
|
||||
"id": "e7ef5713-9dab-4bd4-b899-715b0ca4379e",
|
||||
"email": "user@immich.local",
|
||||
"name": "user",
|
||||
"profileImagePath": "",
|
||||
"avatarColor": "primary",
|
||||
"profileChangedAt": "2025-05-11T10:07:46.866Z",
|
||||
"storageLabel": "user",
|
||||
"shouldChangePassword": True,
|
||||
"isAdmin": True,
|
||||
"createdAt": "2025-05-11T10:07:46.866Z",
|
||||
"deletedAt": None,
|
||||
"updatedAt": "2025-05-18T00:59:55.547Z",
|
||||
"oauthId": "",
|
||||
"quotaSizeInBytes": None,
|
||||
"quotaUsageInBytes": 119526467534,
|
||||
"status": "active",
|
||||
"license": None,
|
||||
}
|
||||
)
|
||||
return mock
|
||||
|
||||
|
@@ -1,7 +1,6 @@
|
||||
"""Constants for the Immich integration tests."""
|
||||
|
||||
from aioimmich.albums.models import ImmichAlbum
|
||||
from aioimmich.assets.models import ImmichAsset
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY,
|
||||
@@ -26,27 +25,91 @@ MOCK_CONFIG_ENTRY_DATA = {
|
||||
CONF_VERIFY_SSL: False,
|
||||
}
|
||||
|
||||
MOCK_ALBUM_WITHOUT_ASSETS = ImmichAlbum(
|
||||
"721e1a4b-aa12-441e-8d3b-5ac7ab283bb6",
|
||||
"My Album",
|
||||
"This is my first great album",
|
||||
"0d03a7ad-ddc7-45a6-adee-68d322a6d2f5",
|
||||
1,
|
||||
[],
|
||||
)
|
||||
ALBUM_DATA = {
|
||||
"id": "721e1a4b-aa12-441e-8d3b-5ac7ab283bb6",
|
||||
"albumName": "My Album",
|
||||
"albumThumbnailAssetId": "0d03a7ad-ddc7-45a6-adee-68d322a6d2f5",
|
||||
"albumUsers": [],
|
||||
"assetCount": 1,
|
||||
"assets": [],
|
||||
"createdAt": "2025-05-11T10:13:22.799Z",
|
||||
"hasSharedLink": False,
|
||||
"isActivityEnabled": False,
|
||||
"ownerId": "e7ef5713-9dab-4bd4-b899-715b0ca4379e",
|
||||
"owner": {
|
||||
"id": "e7ef5713-9dab-4bd4-b899-715b0ca4379e",
|
||||
"email": "admin@immich.local",
|
||||
"name": "admin",
|
||||
"profileImagePath": "",
|
||||
"avatarColor": "primary",
|
||||
"profileChangedAt": "2025-05-11T10:07:46.866Z",
|
||||
},
|
||||
"shared": False,
|
||||
"updatedAt": "2025-05-17T11:26:03.696Z",
|
||||
}
|
||||
|
||||
MOCK_ALBUM_WITH_ASSETS = ImmichAlbum(
|
||||
"721e1a4b-aa12-441e-8d3b-5ac7ab283bb6",
|
||||
"My Album",
|
||||
"This is my first great album",
|
||||
"0d03a7ad-ddc7-45a6-adee-68d322a6d2f5",
|
||||
1,
|
||||
[
|
||||
ImmichAsset(
|
||||
"2e94c203-50aa-4ad2-8e29-56dd74e0eff4", "filename.jpg", "image/jpeg"
|
||||
),
|
||||
ImmichAsset(
|
||||
"2e65a5f2-db83-44c4-81ab-f5ff20c9bd7b", "filename.mp4", "video/mp4"
|
||||
),
|
||||
],
|
||||
MOCK_ALBUM_WITHOUT_ASSETS = ImmichAlbum.from_dict(ALBUM_DATA)
|
||||
|
||||
MOCK_ALBUM_WITH_ASSETS = ImmichAlbum.from_dict(
|
||||
{
|
||||
**ALBUM_DATA,
|
||||
"assets": [
|
||||
{
|
||||
"id": "2e94c203-50aa-4ad2-8e29-56dd74e0eff4",
|
||||
"deviceAssetId": "web-filename.jpg-1675185639000",
|
||||
"ownerId": "e7ef5713-9dab-4bd4-b899-715b0ca4379e",
|
||||
"deviceId": "WEB",
|
||||
"libraryId": None,
|
||||
"type": "IMAGE",
|
||||
"originalPath": "upload/upload/e7ef5713-9dab-4bd4-b899-715b0ca4379e/b4/b8/b4b8ef00-8a6d-4056-91ff-7f86dc66e427.jpg",
|
||||
"originalFileName": "filename.jpg",
|
||||
"originalMimeType": "image/jpeg",
|
||||
"thumbhash": "1igGFALX8mVGdHc5aChJf5nxNg==",
|
||||
"fileCreatedAt": "2023-01-31T17:20:37.085+00:00",
|
||||
"fileModifiedAt": "2023-01-31T17:20:39+00:00",
|
||||
"localDateTime": "2023-01-31T18:20:37.085+00:00",
|
||||
"updatedAt": "2025-05-11T10:13:49.590401+00:00",
|
||||
"isFavorite": False,
|
||||
"isArchived": False,
|
||||
"isTrashed": False,
|
||||
"duration": "0:00:00.00000",
|
||||
"exifInfo": {},
|
||||
"livePhotoVideoId": None,
|
||||
"people": [],
|
||||
"checksum": "HJm7TVOP80S+eiYZnAhWyRaB/Yc=",
|
||||
"isOffline": False,
|
||||
"hasMetadata": True,
|
||||
"duplicateId": None,
|
||||
"resized": True,
|
||||
},
|
||||
{
|
||||
"id": "2e65a5f2-db83-44c4-81ab-f5ff20c9bd7b",
|
||||
"deviceAssetId": "web-filename.mp4-1675185639000",
|
||||
"ownerId": "e7ef5713-9dab-4bd4-b899-715b0ca4379e",
|
||||
"deviceId": "WEB",
|
||||
"libraryId": None,
|
||||
"type": "IMAGE",
|
||||
"originalPath": "upload/upload/e7ef5713-9dab-4bd4-b899-715b0ca4379e/b4/b8/b4b8ef00-8a6d-4056-eeff-7f86dc66e427.mp4",
|
||||
"originalFileName": "filename.mp4",
|
||||
"originalMimeType": "video/mp4",
|
||||
"thumbhash": "1igGFALX8mVGdHc5aChJf5nxNg==",
|
||||
"fileCreatedAt": "2023-01-31T17:20:37.085+00:00",
|
||||
"fileModifiedAt": "2023-01-31T17:20:39+00:00",
|
||||
"localDateTime": "2023-01-31T18:20:37.085+00:00",
|
||||
"updatedAt": "2025-05-11T10:13:49.590401+00:00",
|
||||
"isFavorite": False,
|
||||
"isArchived": False,
|
||||
"isTrashed": False,
|
||||
"duration": "0:00:00.00000",
|
||||
"exifInfo": {},
|
||||
"livePhotoVideoId": None,
|
||||
"people": [],
|
||||
"checksum": "HJm7TVOP80S+eiYZnAhWyRaB/Yc=",
|
||||
"isOffline": False,
|
||||
"hasMetadata": True,
|
||||
"duplicateId": None,
|
||||
"resized": True,
|
||||
},
|
||||
],
|
||||
}
|
||||
)
|
||||
|
@@ -3,36 +3,48 @@
|
||||
dict({
|
||||
'data': dict({
|
||||
'server_about': dict({
|
||||
'build': None,
|
||||
'build_image': None,
|
||||
'build_image_url': None,
|
||||
'build_url': None,
|
||||
'exiftool': None,
|
||||
'ffmpeg': None,
|
||||
'imagemagick': None,
|
||||
'libvips': None,
|
||||
'build': '14709928600',
|
||||
'build_image': 'v1.132.3',
|
||||
'build_image_url': 'https://github.com/immich-app/immich/pkgs/container/immich-server',
|
||||
'build_url': 'https://github.com/immich-app/immich/actions/runs/14709928600',
|
||||
'exiftool': '13.00',
|
||||
'ffmpeg': '7.0.2-7',
|
||||
'imagemagick': '7.1.1-47',
|
||||
'libvips': '8.16.1',
|
||||
'licensed': False,
|
||||
'nodejs': None,
|
||||
'repository': None,
|
||||
'repository_url': None,
|
||||
'source_commit': None,
|
||||
'source_ref': None,
|
||||
'source_url': None,
|
||||
'nodejs': 'v22.14.0',
|
||||
'repository': 'immich-app/immich',
|
||||
'repository_url': 'https://github.com/immich-app/immich',
|
||||
'source_commit': '02994883fe3f3972323bb6759d0170a4062f5236',
|
||||
'source_ref': 'v1.132.3',
|
||||
'source_url': 'https://github.com/immich-app/immich/commit/02994883fe3f3972323bb6759d0170a4062f5236',
|
||||
'version': 'v1.132.3',
|
||||
'version_url': 'some_url',
|
||||
'version_url': 'https://github.com/immich-app/immich/releases/tag/v1.132.3',
|
||||
}),
|
||||
'server_storage': dict({
|
||||
'disk_available': '136.3 GiB',
|
||||
'disk_available_raw': 146402975744,
|
||||
'disk_available_raw': 146403004416,
|
||||
'disk_size': '294.2 GiB',
|
||||
'disk_size_raw': 315926315008,
|
||||
'disk_usage_percentage': 48.56,
|
||||
'disk_use': '142.9 GiB',
|
||||
'disk_use_raw': 153400434688,
|
||||
'disk_use_raw': 153400406016,
|
||||
}),
|
||||
'server_usage': dict({
|
||||
'photos': 27038,
|
||||
'usage': 119525451912,
|
||||
'usage_by_user': list([
|
||||
dict({
|
||||
'photos': 27038,
|
||||
'quota_size_in_bytes': None,
|
||||
'usage': 119525451912,
|
||||
'usage_photos': 54291170551,
|
||||
'usage_videos': 65234281361,
|
||||
'user_id': 'e7ef5713-9dab-4bd4-b899-715b0ca4379e',
|
||||
'user_name': 'admin',
|
||||
'videos': 1836,
|
||||
}),
|
||||
]),
|
||||
'usage_photos': 54291170551,
|
||||
'usage_videos': 65234281361,
|
||||
'videos': 1836,
|
||||
|
@@ -55,7 +55,7 @@
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '136.34839630127',
|
||||
'state': '136.34842300415',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.someone_disk_size-entry]
|
||||
@@ -225,7 +225,7 @@
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '142.865287780762',
|
||||
'state': '142.865261077881',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[sensor.someone_disk_used_by_photos-entry]
|
||||
|
@@ -2,9 +2,7 @@
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from homeassistant.components.landisgyr_heat_meter.const import (
|
||||
DOMAIN as LANDISGYR_HEAT_METER_DOMAIN,
|
||||
)
|
||||
from homeassistant.components.landisgyr_heat_meter.const import DOMAIN
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
@@ -66,7 +64,7 @@ async def test_migrate_entry(
|
||||
# Create entity entry to migrate to new unique ID
|
||||
entity_registry.async_get_or_create(
|
||||
SENSOR_DOMAIN,
|
||||
LANDISGYR_HEAT_METER_DOMAIN,
|
||||
DOMAIN,
|
||||
"landisgyr_heat_meter_987654321_measuring_range_m3ph",
|
||||
suggested_object_id="heat_meter_measuring_range",
|
||||
config_entry=mock_entry,
|
||||
|
@@ -6,7 +6,7 @@ from unittest.mock import MagicMock
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.lawn_mower import (
|
||||
DOMAIN as LAWN_MOWER_DOMAIN,
|
||||
DOMAIN,
|
||||
LawnMowerActivity,
|
||||
LawnMowerEntity,
|
||||
LawnMowerEntityFeature,
|
||||
@@ -104,7 +104,7 @@ async def test_lawn_mower_setup(hass: HomeAssistant) -> None:
|
||||
|
||||
mock_platform(
|
||||
hass,
|
||||
f"{TEST_DOMAIN}.{LAWN_MOWER_DOMAIN}",
|
||||
f"{TEST_DOMAIN}.{DOMAIN}",
|
||||
MockPlatform(async_setup_entry=async_setup_entry_platform),
|
||||
)
|
||||
|
||||
|
@@ -14,18 +14,18 @@ async def test_repair_issue_is_created(
|
||||
) -> None:
|
||||
"""Test repair issue is created."""
|
||||
from homeassistant.components.lirc import ( # pylint: disable=import-outside-toplevel
|
||||
DOMAIN as LIRC_DOMAIN,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
LIRC_DOMAIN,
|
||||
DOMAIN,
|
||||
{
|
||||
LIRC_DOMAIN: {},
|
||||
DOMAIN: {},
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
assert (
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{LIRC_DOMAIN}",
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
) in issue_registry.issues
|
||||
|
@@ -6,11 +6,7 @@ from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.lock import (
|
||||
DOMAIN as LOCK_DOMAIN,
|
||||
LockEntity,
|
||||
LockEntityFeature,
|
||||
)
|
||||
from homeassistant.components.lock import DOMAIN, LockEntity, LockEntityFeature
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigFlow
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -128,7 +124,7 @@ async def setup_lock_platform_test_entity(
|
||||
|
||||
mock_platform(
|
||||
hass,
|
||||
f"{TEST_DOMAIN}.{LOCK_DOMAIN}",
|
||||
f"{TEST_DOMAIN}.{DOMAIN}",
|
||||
MockPlatform(async_setup_entry=async_setup_entry_platform),
|
||||
)
|
||||
|
||||
|
@@ -38,7 +38,7 @@ from homeassistant.components.matrix import (
|
||||
RoomAnyID,
|
||||
RoomID,
|
||||
)
|
||||
from homeassistant.components.matrix.const import DOMAIN as MATRIX_DOMAIN
|
||||
from homeassistant.components.matrix.const import DOMAIN
|
||||
from homeassistant.components.matrix.notify import CONF_DEFAULT_ROOM
|
||||
from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
|
||||
from homeassistant.const import (
|
||||
@@ -137,7 +137,7 @@ class _MockAsyncClient(AsyncClient):
|
||||
|
||||
|
||||
MOCK_CONFIG_DATA = {
|
||||
MATRIX_DOMAIN: {
|
||||
DOMAIN: {
|
||||
CONF_HOMESERVER: "https://matrix.example.com",
|
||||
CONF_USERNAME: TEST_MXID,
|
||||
CONF_PASSWORD: TEST_PASSWORD,
|
||||
@@ -166,7 +166,7 @@ MOCK_CONFIG_DATA = {
|
||||
},
|
||||
NOTIFY_DOMAIN: {
|
||||
CONF_NAME: TEST_NOTIFIER_NAME,
|
||||
CONF_PLATFORM: MATRIX_DOMAIN,
|
||||
CONF_PLATFORM: DOMAIN,
|
||||
CONF_DEFAULT_ROOM: TEST_DEFAULT_ROOM,
|
||||
},
|
||||
}
|
||||
@@ -282,13 +282,13 @@ async def matrix_bot(
|
||||
The resulting MatrixBot will have a mocked _client.
|
||||
"""
|
||||
|
||||
assert await async_setup_component(hass, MATRIX_DOMAIN, MOCK_CONFIG_DATA)
|
||||
assert await async_setup_component(hass, DOMAIN, MOCK_CONFIG_DATA)
|
||||
assert await async_setup_component(hass, NOTIFY_DOMAIN, MOCK_CONFIG_DATA)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Accessing hass.data in tests is not desirable, but all the tests here
|
||||
# currently do this.
|
||||
assert isinstance(matrix_bot := hass.data[MATRIX_DOMAIN], MatrixBot)
|
||||
assert isinstance(matrix_bot := hass.data[DOMAIN], MatrixBot)
|
||||
|
||||
await hass.async_start()
|
||||
|
||||
@@ -298,7 +298,7 @@ async def matrix_bot(
|
||||
@pytest.fixture
|
||||
def matrix_events(hass: HomeAssistant) -> list[Event]:
|
||||
"""Track event calls."""
|
||||
return async_capture_events(hass, MATRIX_DOMAIN)
|
||||
return async_capture_events(hass, DOMAIN)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
@@ -1,10 +1,6 @@
|
||||
"""Configure and test MatrixBot."""
|
||||
|
||||
from homeassistant.components.matrix import (
|
||||
DOMAIN as MATRIX_DOMAIN,
|
||||
SERVICE_SEND_MESSAGE,
|
||||
MatrixBot,
|
||||
)
|
||||
from homeassistant.components.matrix import DOMAIN, SERVICE_SEND_MESSAGE, MatrixBot
|
||||
from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
@@ -17,7 +13,7 @@ async def test_services(hass: HomeAssistant, matrix_bot: MatrixBot) -> None:
|
||||
services = hass.services.async_services()
|
||||
|
||||
# Verify that the matrix service is registered
|
||||
assert (matrix_service := services.get(MATRIX_DOMAIN))
|
||||
assert (matrix_service := services.get(DOMAIN))
|
||||
assert SERVICE_SEND_MESSAGE in matrix_service
|
||||
|
||||
# Verify that the matrix notifier is registered
|
||||
|
@@ -3,7 +3,7 @@
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.matrix import MatrixBot
|
||||
from homeassistant.components.matrix.const import DOMAIN as MATRIX_DOMAIN
|
||||
from homeassistant.components.matrix.const import DOMAIN
|
||||
from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_START
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -20,14 +20,14 @@ async def test_join(
|
||||
mock_allowed_path,
|
||||
) -> None:
|
||||
"""Test joining configured rooms."""
|
||||
assert await async_setup_component(hass, MATRIX_DOMAIN, MOCK_CONFIG_DATA)
|
||||
assert await async_setup_component(hass, DOMAIN, MOCK_CONFIG_DATA)
|
||||
assert await async_setup_component(hass, NOTIFY_DOMAIN, MOCK_CONFIG_DATA)
|
||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||
await hass.async_block_till_done(wait_background_tasks=True)
|
||||
|
||||
# Accessing hass.data in tests is not desirable, but all the tests here
|
||||
# currently do this.
|
||||
matrix_bot = hass.data[MATRIX_DOMAIN]
|
||||
matrix_bot = hass.data[DOMAIN]
|
||||
|
||||
for room_id in TEST_JOINABLE_ROOMS:
|
||||
assert f"Joined or already in room '{room_id}'" in caplog.messages
|
||||
|
@@ -2,12 +2,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.matrix import (
|
||||
ATTR_FORMAT,
|
||||
ATTR_IMAGES,
|
||||
DOMAIN as MATRIX_DOMAIN,
|
||||
MatrixBot,
|
||||
)
|
||||
from homeassistant.components.matrix import ATTR_FORMAT, ATTR_IMAGES, DOMAIN, MatrixBot
|
||||
from homeassistant.components.matrix.const import FORMAT_HTML, SERVICE_SEND_MESSAGE
|
||||
from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, ATTR_TARGET
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
@@ -30,9 +25,7 @@ async def test_send_message(
|
||||
|
||||
# Send a message without an attached image.
|
||||
data = {ATTR_MESSAGE: "Test message", ATTR_TARGET: list(TEST_JOINABLE_ROOMS)}
|
||||
await hass.services.async_call(
|
||||
MATRIX_DOMAIN, SERVICE_SEND_MESSAGE, data, blocking=True
|
||||
)
|
||||
await hass.services.async_call(DOMAIN, SERVICE_SEND_MESSAGE, data, blocking=True)
|
||||
|
||||
for room_alias_or_id in TEST_JOINABLE_ROOMS:
|
||||
assert f"Message delivered to room '{room_alias_or_id}'" in caplog.messages
|
||||
@@ -43,18 +36,14 @@ async def test_send_message(
|
||||
ATTR_TARGET: list(TEST_JOINABLE_ROOMS),
|
||||
ATTR_DATA: {ATTR_FORMAT: FORMAT_HTML},
|
||||
}
|
||||
await hass.services.async_call(
|
||||
MATRIX_DOMAIN, SERVICE_SEND_MESSAGE, data, blocking=True
|
||||
)
|
||||
await hass.services.async_call(DOMAIN, SERVICE_SEND_MESSAGE, data, blocking=True)
|
||||
|
||||
for room_alias_or_id in TEST_JOINABLE_ROOMS:
|
||||
assert f"Message delivered to room '{room_alias_or_id}'" in caplog.messages
|
||||
|
||||
# Send a message with an attached image.
|
||||
data[ATTR_DATA] = {ATTR_IMAGES: [image_path.name]}
|
||||
await hass.services.async_call(
|
||||
MATRIX_DOMAIN, SERVICE_SEND_MESSAGE, data, blocking=True
|
||||
)
|
||||
await hass.services.async_call(DOMAIN, SERVICE_SEND_MESSAGE, data, blocking=True)
|
||||
|
||||
for room_alias_or_id in TEST_JOINABLE_ROOMS:
|
||||
assert f"Message delivered to room '{room_alias_or_id}'" in caplog.messages
|
||||
@@ -72,9 +61,7 @@ async def test_unsendable_message(
|
||||
|
||||
data = {ATTR_MESSAGE: "Test message", ATTR_TARGET: TEST_BAD_ROOM}
|
||||
|
||||
await hass.services.async_call(
|
||||
MATRIX_DOMAIN, SERVICE_SEND_MESSAGE, data, blocking=True
|
||||
)
|
||||
await hass.services.async_call(DOMAIN, SERVICE_SEND_MESSAGE, data, blocking=True)
|
||||
|
||||
assert (
|
||||
f"Unable to deliver message to room '{TEST_BAD_ROOM}': ErrorResponse: Cannot send a message in this room."
|
||||
|
@@ -1,21 +1,23 @@
|
||||
"""Tests for the moehlenhoff_alpha2 integration."""
|
||||
|
||||
from functools import partialmethod
|
||||
from unittest.mock import patch
|
||||
|
||||
from moehlenhoff_alpha2 import Alpha2Base
|
||||
import xmltodict
|
||||
|
||||
from homeassistant.components.moehlenhoff_alpha2.const import DOMAIN
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from tests.common import MockConfigEntry, load_fixture
|
||||
from tests.common import MockConfigEntry, async_load_fixture
|
||||
|
||||
MOCK_BASE_HOST = "fake-base-host"
|
||||
|
||||
|
||||
async def mock_update_data(self):
|
||||
async def mock_update_data(self: Alpha2Base, hass: HomeAssistant) -> None:
|
||||
"""Mock Alpha2Base.update_data."""
|
||||
data = xmltodict.parse(load_fixture("static2.xml", DOMAIN))
|
||||
data = xmltodict.parse(await async_load_fixture(hass, "static2.xml", DOMAIN))
|
||||
for _type in ("HEATAREA", "HEATCTRL", "IODEVICE"):
|
||||
if not isinstance(data["Devices"]["Device"][_type], list):
|
||||
data["Devices"]["Device"][_type] = [data["Devices"]["Device"][_type]]
|
||||
@@ -26,7 +28,7 @@ async def init_integration(hass: HomeAssistant) -> MockConfigEntry:
|
||||
"""Mock integration setup."""
|
||||
with patch(
|
||||
"homeassistant.components.moehlenhoff_alpha2.coordinator.Alpha2Base.update_data",
|
||||
mock_update_data,
|
||||
partialmethod(mock_update_data, hass),
|
||||
):
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
|
@@ -1,5 +1,6 @@
|
||||
"""Test the moehlenhoff_alpha2 config flow."""
|
||||
|
||||
from functools import partialmethod
|
||||
from unittest.mock import patch
|
||||
|
||||
from homeassistant import config_entries
|
||||
@@ -24,7 +25,7 @@ async def test_form(hass: HomeAssistant) -> None:
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.moehlenhoff_alpha2.config_flow.Alpha2Base.update_data",
|
||||
mock_update_data,
|
||||
partialmethod(mock_update_data, hass),
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.moehlenhoff_alpha2.async_setup_entry",
|
||||
@@ -54,7 +55,10 @@ async def test_form_duplicate_error(hass: HomeAssistant) -> None:
|
||||
|
||||
assert config_entry.data["host"] == MOCK_BASE_HOST
|
||||
|
||||
with patch("moehlenhoff_alpha2.Alpha2Base.update_data", mock_update_data):
|
||||
with patch(
|
||||
"moehlenhoff_alpha2.Alpha2Base.update_data",
|
||||
partialmethod(mock_update_data, hass),
|
||||
):
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
data={"host": MOCK_BASE_HOST},
|
||||
|
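functools.partialmethod is what makes the patched Alpha2Base.update_data above still receive the instance as self while getting hass bound in front of any call-time arguments. A self-contained sketch of that behaviour (class and argument names are illustrative, not from the integration):

    from functools import partialmethod


    class Base:
        def _update(self, hass, source="patched"):
            return f"{type(self).__name__} updated via {hass} ({source})"

        # When accessed on an instance, `self` is inserted first, then the
        # bound argument, then any call-time args: _update(self, "hass", ...)
        update_data = partialmethod(_update, "hass")


    print(Base().update_data())            # Base updated via hass (patched)
    print(Base().update_data(source="x"))  # Base updated via hass (x)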
@@ -1,6 +1,5 @@
|
||||
"""Test the motionEye camera."""
|
||||
|
||||
from asyncio import AbstractEventLoop
|
||||
from collections.abc import Callable
|
||||
import copy
|
||||
from unittest.mock import AsyncMock, Mock, call
|
||||
@@ -67,7 +66,6 @@ from tests.common import async_fire_time_changed
|
||||
|
||||
@pytest.fixture
|
||||
def aiohttp_server(
|
||||
event_loop: AbstractEventLoop,
|
||||
aiohttp_server: Callable[[], TestServer],
|
||||
socket_enabled: None,
|
||||
) -> Callable[[], TestServer]:
|
||||
|
@@ -6,7 +6,7 @@ from freezegun.api import FrozenDateTimeFactory
|
||||
import pytest
|
||||
|
||||
from homeassistant.components import device_tracker, mqtt
|
||||
from homeassistant.components.mqtt.const import DOMAIN as MQTT_DOMAIN
|
||||
from homeassistant.components.mqtt.const import DOMAIN
|
||||
from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
@@ -275,7 +275,7 @@ async def test_cleanup_device_tracker(
|
||||
assert state is not None
|
||||
|
||||
# Remove MQTT from the device
|
||||
mqtt_config_entry = hass.config_entries.async_entries(MQTT_DOMAIN)[0]
|
||||
mqtt_config_entry = hass.config_entries.async_entries(DOMAIN)[0]
|
||||
response = await ws_client.remove_device(
|
||||
device_entry.id, mqtt_config_entry.entry_id
|
||||
)
|
||||
|
@@ -8,7 +8,7 @@ from unittest.mock import ANY, AsyncMock
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.device_automation import DeviceAutomationType
|
||||
from homeassistant.components.mqtt.const import DOMAIN as MQTT_DOMAIN
|
||||
from homeassistant.components.mqtt.const import DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.setup import async_setup_component
|
||||
@@ -403,7 +403,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry(
|
||||
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
|
||||
|
||||
# Remove MQTT from the device
|
||||
mqtt_config_entry = hass.config_entries.async_entries(MQTT_DOMAIN)[0]
|
||||
mqtt_config_entry = hass.config_entries.async_entries(DOMAIN)[0]
|
||||
response = await ws_client.remove_device(
|
||||
device_entry.id, mqtt_config_entry.entry_id
|
||||
)
|
||||
@@ -590,7 +590,7 @@ async def test_cleanup_tag(
|
||||
mqtt_mock.async_publish.assert_not_called()
|
||||
|
||||
# Remove MQTT from the device
|
||||
mqtt_config_entry = hass.config_entries.async_entries(MQTT_DOMAIN)[0]
|
||||
mqtt_config_entry = hass.config_entries.async_entries(DOMAIN)[0]
|
||||
response = await ws_client.remove_device(
|
||||
device_entry1.id, mqtt_config_entry.entry_id
|
||||
)
|
||||
|
@@ -15,7 +15,7 @@ from homeassistant.components.music_assistant.const import (
|
||||
ATTR_FAVORITE,
|
||||
ATTR_MEDIA_TYPE,
|
||||
ATTR_SEARCH_NAME,
|
||||
DOMAIN as MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
@@ -36,7 +36,7 @@ async def test_search_action(
|
||||
)
|
||||
)
|
||||
response = await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_SEARCH,
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: entry.entry_id,
|
||||
@@ -69,7 +69,7 @@ async def test_get_library_action(
|
||||
"""Test music assistant get_library action."""
|
||||
entry = await setup_integration_from_fixtures(hass, music_assistant_client)
|
||||
response = await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_GET_LIBRARY,
|
||||
{
|
||||
ATTR_CONFIG_ENTRY_ID: entry.entry_id,
|
||||
|
@@ -30,7 +30,7 @@ from homeassistant.components.media_player import (
|
||||
SERVICE_UNJOIN,
|
||||
MediaPlayerEntityFeature,
|
||||
)
|
||||
from homeassistant.components.music_assistant.const import DOMAIN as MASS_DOMAIN
|
||||
from homeassistant.components.music_assistant.const import DOMAIN
|
||||
from homeassistant.components.music_assistant.media_player import (
|
||||
ATTR_ALBUM,
|
||||
ATTR_ANNOUNCE_VOLUME,
|
||||
@@ -389,7 +389,7 @@ async def test_media_player_play_media_action(
|
||||
|
||||
# test simple play_media call with URI as media_id and no media type
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_PLAY_MEDIA_ADVANCED,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -410,7 +410,7 @@ async def test_media_player_play_media_action(
|
||||
# test simple play_media call with URI and enqueue specified
|
||||
music_assistant_client.send_command.reset_mock()
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_PLAY_MEDIA_ADVANCED,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -432,7 +432,7 @@ async def test_media_player_play_media_action(
|
||||
# test basic play_media call with URL and radio mode specified
|
||||
music_assistant_client.send_command.reset_mock()
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_PLAY_MEDIA_ADVANCED,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -455,7 +455,7 @@ async def test_media_player_play_media_action(
|
||||
music_assistant_client.send_command.reset_mock()
|
||||
music_assistant_client.music.get_item = AsyncMock(return_value=MOCK_TRACK)
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_PLAY_MEDIA_ADVANCED,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -482,7 +482,7 @@ async def test_media_player_play_media_action(
|
||||
music_assistant_client.send_command.reset_mock()
|
||||
music_assistant_client.music.get_item_by_name = AsyncMock(return_value=MOCK_TRACK)
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_PLAY_MEDIA_ADVANCED,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -521,7 +521,7 @@ async def test_media_player_play_announcement_action(
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_PLAY_ANNOUNCEMENT,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -551,7 +551,7 @@ async def test_media_player_transfer_queue_action(
|
||||
state = hass.states.get(entity_id)
|
||||
assert state
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_TRANSFER_QUEUE,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -572,7 +572,7 @@ async def test_media_player_transfer_queue_action(
|
||||
music_assistant_client.send_command.reset_mock()
|
||||
with pytest.raises(HomeAssistantError, match="Source player not available."):
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_TRANSFER_QUEUE,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -583,7 +583,7 @@ async def test_media_player_transfer_queue_action(
|
||||
# test again with no source player specified (which picks first playing playerqueue)
|
||||
music_assistant_client.send_command.reset_mock()
|
||||
await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_TRANSFER_QUEUE,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
@@ -609,7 +609,7 @@ async def test_media_player_get_queue_action(
|
||||
await setup_integration_from_fixtures(hass, music_assistant_client)
|
||||
entity_id = "media_player.test_group_player_1"
|
||||
response = await hass.services.async_call(
|
||||
MASS_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_GET_QUEUE,
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
|
@@ -8,13 +8,14 @@ from unittest.mock import patch
|
||||
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.netatmo.const import DOMAIN
|
||||
from homeassistant.components.webhook import async_handle_webhook
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.util.aiohttp import MockRequest
|
||||
|
||||
from tests.common import MockConfigEntry, load_fixture
|
||||
from tests.common import MockConfigEntry, async_load_fixture
|
||||
from tests.test_util.aiohttp import AiohttpClientMockResponse
|
||||
|
||||
COMMON_RESPONSE = {
|
||||
@@ -53,7 +54,7 @@ async def snapshot_platform_entities(
|
||||
)
|
||||
|
||||
|
||||
async def fake_post_request(*args: Any, **kwargs: Any):
|
||||
async def fake_post_request(hass: HomeAssistant, *args: Any, **kwargs: Any):
|
||||
"""Return fake data."""
|
||||
if "endpoint" not in kwargs:
|
||||
return "{}"
|
||||
@@ -75,10 +76,12 @@ async def fake_post_request(*args: Any, **kwargs: Any):
|
||||
|
||||
elif endpoint == "homestatus":
|
||||
home_id = kwargs.get("params", {}).get("home_id")
|
||||
payload = json.loads(load_fixture(f"netatmo/{endpoint}_{home_id}.json"))
|
||||
payload = json.loads(
|
||||
await async_load_fixture(hass, f"{endpoint}_{home_id}.json", DOMAIN)
|
||||
)
|
||||
|
||||
else:
|
||||
payload = json.loads(load_fixture(f"netatmo/{endpoint}.json"))
|
||||
payload = json.loads(await async_load_fixture(hass, f"{endpoint}.json", DOMAIN))
|
||||
|
||||
return AiohttpClientMockResponse(
|
||||
method="POST",
|
||||
|
@@ -1,5 +1,7 @@
|
||||
"""Provide common Netatmo fixtures."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from functools import partial
|
||||
from time import time
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
@@ -87,13 +89,17 @@ def mock_config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry:
|
||||
|
||||
|
||||
@pytest.fixture(name="netatmo_auth")
|
||||
def netatmo_auth() -> AsyncMock:
|
||||
def netatmo_auth(hass: HomeAssistant) -> Generator[None]:
|
||||
"""Restrict loaded platforms to list given."""
|
||||
with patch(
|
||||
"homeassistant.components.netatmo.api.AsyncConfigEntryNetatmoAuth"
|
||||
) as mock_auth:
|
||||
mock_auth.return_value.async_post_request.side_effect = fake_post_request
|
||||
mock_auth.return_value.async_post_api_request.side_effect = fake_post_request
|
||||
mock_auth.return_value.async_post_request.side_effect = partial(
|
||||
fake_post_request, hass
|
||||
)
|
||||
mock_auth.return_value.async_post_api_request.side_effect = partial(
|
||||
fake_post_request, hass
|
||||
)
|
||||
mock_auth.return_value.async_get_image.side_effect = fake_get_image
|
||||
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
|
||||
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
|
||||
|
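The Netatmo fixture does the analogous thing with functools.partial: hass is bound as the first argument of fake_post_request, so the mock's side_effect still matches the call signature the integration uses. A tiny standalone sketch (values are illustrative):

    from functools import partial


    def fake_post_request(hass, *args, **kwargs):
        # `hass` comes from the partial; endpoint and params come from the caller.
        return {"hass": hass, "args": args, "kwargs": kwargs}


    side_effect = partial(fake_post_request, "hass-instance")
    print(side_effect("homestatus", params={"home_id": "abc"}))
    # -> {'hass': 'hass-instance', 'args': ('homestatus',), 'kwargs': {'params': {'home_id': 'abc'}}}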
@@ -408,7 +408,7 @@ async def test_camera_reconnect_webhook(
|
||||
"""Fake error during requesting backend data."""
|
||||
nonlocal fake_post_hits
|
||||
fake_post_hits += 1
|
||||
return await fake_post_request(*args, **kwargs)
|
||||
return await fake_post_request(hass, *args, **kwargs)
|
||||
|
||||
with (
|
||||
patch(
|
||||
@@ -507,7 +507,7 @@ async def test_setup_component_no_devices(
|
||||
"""Fake error during requesting backend data."""
|
||||
nonlocal fake_post_hits
|
||||
fake_post_hits += 1
|
||||
return await fake_post_request(*args, **kwargs)
|
||||
return await fake_post_request(hass, *args, **kwargs)
|
||||
|
||||
with (
|
||||
patch(
|
||||
@@ -550,7 +550,7 @@ async def test_camera_image_raises_exception(
|
||||
if "snapshot_720.jpg" in endpoint:
|
||||
raise pyatmo.ApiError
|
||||
|
||||
return await fake_post_request(*args, **kwargs)
|
||||
return await fake_post_request(hass, *args, **kwargs)
|
||||
|
||||
with (
|
||||
patch(
|
||||
|
@@ -26,7 +26,7 @@ from homeassistant.components.netatmo.const import (
|
||||
ATTR_SCHEDULE_NAME,
|
||||
ATTR_TARGET_TEMPERATURE,
|
||||
ATTR_TIME_PERIOD,
|
||||
DOMAIN as NETATMO_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_TEMPERATURE_SETTING,
|
||||
SERVICE_SET_PRESET_MODE_WITH_END_DATETIME,
|
||||
SERVICE_SET_SCHEDULE,
|
||||
@@ -437,7 +437,7 @@ async def test_service_set_temperature_with_end_datetime(
|
||||
|
||||
# Test service setting the temperature without an end datetime
|
||||
await hass.services.async_call(
|
||||
NETATMO_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_SET_TEMPERATURE_WITH_END_DATETIME,
|
||||
{
|
||||
ATTR_ENTITY_ID: climate_entity_livingroom,
|
||||
@@ -495,7 +495,7 @@ async def test_service_set_temperature_with_time_period(
|
||||
|
||||
# Test service setting the temperature without an end datetime
|
||||
await hass.services.async_call(
|
||||
NETATMO_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_SET_TEMPERATURE_WITH_TIME_PERIOD,
|
||||
{
|
||||
ATTR_ENTITY_ID: climate_entity_livingroom,
|
||||
@@ -583,7 +583,7 @@ async def test_service_clear_temperature_setting(
|
||||
|
||||
# Test service setting the temperature without an end datetime
|
||||
await hass.services.async_call(
|
||||
NETATMO_DOMAIN,
|
||||
DOMAIN,
|
||||
SERVICE_CLEAR_TEMPERATURE_SETTING,
|
||||
{ATTR_ENTITY_ID: climate_entity_livingroom},
|
||||
blocking=True,
|
||||
|
@@ -5,7 +5,7 @@ from pytest_unordered import unordered

from homeassistant.components import automation
from homeassistant.components.device_automation import DeviceAutomationType
from homeassistant.components.netatmo import DOMAIN as NETATMO_DOMAIN
from homeassistant.components.netatmo import DOMAIN
from homeassistant.components.netatmo.const import (
CLIMATE_TRIGGERS,
INDOOR_CAMERA_TRIGGERS,
@@ -43,7 +43,7 @@ async def test_get_triggers(
event_types,
) -> None:
"""Test we get the expected triggers from a netatmo devices."""
config_entry = MockConfigEntry(domain=NETATMO_DOMAIN, data={})
config_entry = MockConfigEntry(domain=DOMAIN, data={})
config_entry.add_to_hass(hass)
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
@@ -51,7 +51,7 @@ async def test_get_triggers(
model=device_type,
)
entity_entry = entity_registry.async_get_or_create(
platform, NETATMO_DOMAIN, "5678", device_id=device_entry.id
platform, DOMAIN, "5678", device_id=device_entry.id
)
expected_triggers = []
for event_type in event_types:
@@ -59,7 +59,7 @@ async def test_get_triggers(
expected_triggers.extend(
{
"platform": "device",
"domain": NETATMO_DOMAIN,
"domain": DOMAIN,
"type": event_type,
"subtype": subtype,
"device_id": device_entry.id,
@@ -72,7 +72,7 @@ async def test_get_triggers(
expected_triggers.append(
{
"platform": "device",
"domain": NETATMO_DOMAIN,
"domain": DOMAIN,
"type": event_type,
"device_id": device_entry.id,
"entity_id": entity_entry.id,
@@ -84,7 +84,7 @@ async def test_get_triggers(
for trigger in await async_get_device_automations(
hass, DeviceAutomationType.TRIGGER, device_entry.id
)
if trigger["domain"] == NETATMO_DOMAIN
if trigger["domain"] == DOMAIN
]
assert triggers == unordered(expected_triggers)

@@ -116,16 +116,16 @@ async def test_if_fires_on_event(
"""Test for event triggers firing."""
mac_address = "12:34:56:AB:CD:EF"
connection = (dr.CONNECTION_NETWORK_MAC, mac_address)
config_entry = MockConfigEntry(domain=NETATMO_DOMAIN, data={})
config_entry = MockConfigEntry(domain=DOMAIN, data={})
config_entry.add_to_hass(hass)
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={connection},
identifiers={(NETATMO_DOMAIN, mac_address)},
identifiers={(DOMAIN, mac_address)},
model=camera_type,
)
entity_entry = entity_registry.async_get_or_create(
platform, NETATMO_DOMAIN, "5678", device_id=device_entry.id
platform, DOMAIN, "5678", device_id=device_entry.id
)
events = async_capture_events(hass, "netatmo_event")

@@ -137,7 +137,7 @@ async def test_if_fires_on_event(
{
"trigger": {
"platform": "device",
"domain": NETATMO_DOMAIN,
"domain": DOMAIN,
"device_id": device_entry.id,
"entity_id": entity_entry.id,
"type": event_type,
@@ -199,16 +199,16 @@ async def test_if_fires_on_event_legacy(
"""Test for event triggers firing."""
mac_address = "12:34:56:AB:CD:EF"
connection = (dr.CONNECTION_NETWORK_MAC, mac_address)
config_entry = MockConfigEntry(domain=NETATMO_DOMAIN, data={})
config_entry = MockConfigEntry(domain=DOMAIN, data={})
config_entry.add_to_hass(hass)
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={connection},
identifiers={(NETATMO_DOMAIN, mac_address)},
identifiers={(DOMAIN, mac_address)},
model=camera_type,
)
entity_entry = entity_registry.async_get_or_create(
platform, NETATMO_DOMAIN, "5678", device_id=device_entry.id
platform, DOMAIN, "5678", device_id=device_entry.id
)
events = async_capture_events(hass, "netatmo_event")

@@ -220,7 +220,7 @@ async def test_if_fires_on_event_legacy(
{
"trigger": {
"platform": "device",
"domain": NETATMO_DOMAIN,
"domain": DOMAIN,
"device_id": device_entry.id,
"entity_id": entity_entry.entity_id,
"type": event_type,
@@ -279,16 +279,16 @@ async def test_if_fires_on_event_with_subtype(
"""Test for event triggers firing."""
mac_address = "12:34:56:AB:CD:EF"
connection = (dr.CONNECTION_NETWORK_MAC, mac_address)
config_entry = MockConfigEntry(domain=NETATMO_DOMAIN, data={})
config_entry = MockConfigEntry(domain=DOMAIN, data={})
config_entry.add_to_hass(hass)
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={connection},
identifiers={(NETATMO_DOMAIN, mac_address)},
identifiers={(DOMAIN, mac_address)},
model=camera_type,
)
entity_entry = entity_registry.async_get_or_create(
platform, NETATMO_DOMAIN, "5678", device_id=device_entry.id
platform, DOMAIN, "5678", device_id=device_entry.id
)
events = async_capture_events(hass, "netatmo_event")

@@ -300,7 +300,7 @@ async def test_if_fires_on_event_with_subtype(
{
"trigger": {
"platform": "device",
"domain": NETATMO_DOMAIN,
"domain": DOMAIN,
"device_id": device_entry.id,
"entity_id": entity_entry.id,
"type": event_type,
@@ -358,16 +358,16 @@ async def test_if_invalid_device(
"""Test for event triggers firing."""
mac_address = "12:34:56:AB:CD:EF"
connection = (dr.CONNECTION_NETWORK_MAC, mac_address)
config_entry = MockConfigEntry(domain=NETATMO_DOMAIN, data={})
config_entry = MockConfigEntry(domain=DOMAIN, data={})
config_entry.add_to_hass(hass)
device_entry = device_registry.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={connection},
identifiers={(NETATMO_DOMAIN, mac_address)},
identifiers={(DOMAIN, mac_address)},
model=device_type,
)
entity_entry = entity_registry.async_get_or_create(
platform, NETATMO_DOMAIN, "5678", device_id=device_entry.id
platform, DOMAIN, "5678", device_id=device_entry.id
)

assert await async_setup_component(
@@ -378,7 +378,7 @@ async def test_if_invalid_device(
{
"trigger": {
"platform": "device",
"domain": NETATMO_DOMAIN,
"domain": DOMAIN,
"device_id": device_entry.id,
"entity_id": entity_entry.id,
"type": event_type,

@@ -1,5 +1,6 @@
"""Test the Netatmo diagnostics."""

from functools import partial
from unittest.mock import AsyncMock, patch

from syrupy.assertion import SnapshotAssertion
@@ -33,7 +34,9 @@ async def test_entry_diagnostics(
"homeassistant.components.netatmo.webhook_generate_url",
),
):
mock_auth.return_value.async_post_api_request.side_effect = fake_post_request
mock_auth.return_value.async_post_api_request.side_effect = partial(
fake_post_request, hass
)
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
assert await async_setup_component(hass, "netatmo", {})

@@ -1,6 +1,7 @@
"""The tests for Netatmo component."""

from datetime import timedelta
from functools import partial
from time import time
from unittest.mock import AsyncMock, patch

@@ -68,7 +69,9 @@ async def test_setup_component(
) as mock_impl,
patch("homeassistant.components.netatmo.webhook_generate_url") as mock_webhook,
):
mock_auth.return_value.async_post_api_request.side_effect = fake_post_request
mock_auth.return_value.async_post_api_request.side_effect = partial(
fake_post_request, hass
)
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
assert await async_setup_component(hass, "netatmo", {})
@@ -101,7 +104,7 @@ async def test_setup_component_with_config(
"""Fake error during requesting backend data."""
nonlocal fake_post_hits
fake_post_hits += 1
return await fake_post_request(*args, **kwargs)
return await fake_post_request(hass, *args, **kwargs)

with (
patch(
@@ -184,7 +187,9 @@ async def test_setup_without_https(
"homeassistant.components.netatmo.webhook_generate_url"
) as mock_async_generate_url,
):
mock_auth.return_value.async_post_api_request.side_effect = fake_post_request
mock_auth.return_value.async_post_api_request.side_effect = partial(
fake_post_request, hass
)
mock_async_generate_url.return_value = "http://example.com"
assert await async_setup_component(
hass, "netatmo", {"netatmo": {"client_id": "123", "client_secret": "abc"}}
@@ -226,7 +231,9 @@ async def test_setup_with_cloud(
"homeassistant.components.netatmo.webhook_generate_url",
),
):
mock_auth.return_value.async_post_api_request.side_effect = fake_post_request
mock_auth.return_value.async_post_api_request.side_effect = partial(
fake_post_request, hass
)
assert await async_setup_component(
hass, "netatmo", {"netatmo": {"client_id": "123", "client_secret": "abc"}}
)
@@ -294,7 +301,9 @@ async def test_setup_with_cloudhook(hass: HomeAssistant) -> None:
"homeassistant.components.netatmo.webhook_generate_url",
),
):
mock_auth.return_value.async_post_api_request.side_effect = fake_post_request
mock_auth.return_value.async_post_api_request.side_effect = partial(
fake_post_request, hass
)
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
assert await async_setup_component(hass, "netatmo", {})
@@ -336,7 +345,7 @@ async def test_setup_component_with_delay(
patch("homeassistant.components.netatmo.webhook_generate_url") as mock_webhook,
patch(
"pyatmo.AbstractAsyncAuth.async_post_api_request",
side_effect=fake_post_request,
side_effect=partial(fake_post_request, hass),
) as mock_post_api_request,
patch("homeassistant.components.netatmo.data_handler.PLATFORMS", ["light"]),
):
@@ -405,7 +414,9 @@ async def test_setup_component_invalid_token_scope(hass: HomeAssistant) -> None:
) as mock_impl,
patch("homeassistant.components.netatmo.webhook_generate_url") as mock_webhook,
):
mock_auth.return_value.async_post_api_request.side_effect = fake_post_request
mock_auth.return_value.async_post_api_request.side_effect = partial(
fake_post_request, hass
)
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
assert await async_setup_component(hass, "netatmo", {})
@@ -455,7 +466,9 @@ async def test_setup_component_invalid_token(
"homeassistant.helpers.config_entry_oauth2_flow.OAuth2Session"
) as mock_session,
):
mock_auth.return_value.async_post_api_request.side_effect = fake_post_request
mock_auth.return_value.async_post_api_request.side_effect = partial(
fake_post_request, hass
)
mock_auth.return_value.async_addwebhook.side_effect = AsyncMock()
mock_auth.return_value.async_dropwebhook.side_effect = AsyncMock()
mock_session.return_value.async_ensure_token_valid.side_effect = (

@@ -4,10 +4,7 @@ from unittest.mock import AsyncMock

import pytest

from homeassistant.components.notify import (
DOMAIN as NOTIFY_DOMAIN,
migrate_notify_issue,
)
from homeassistant.components.notify import DOMAIN, migrate_notify_issue
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.setup import async_setup_component
@@ -36,7 +33,7 @@ async def test_notify_migration_repair_flow(
translation_key: str,
) -> None:
"""Test the notify service repair flow is triggered."""
await async_setup_component(hass, NOTIFY_DOMAIN, {})
await async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
await async_process_repairs_platforms(hass)

@@ -58,12 +55,12 @@ async def test_notify_migration_repair_flow(
await hass.async_block_till_done()
# Assert the issue is present
assert issue_registry.async_get_issue(
domain=NOTIFY_DOMAIN,
domain=DOMAIN,
issue_id=translation_key,
)
assert len(issue_registry.issues) == 1

data = await start_repair_fix_flow(http_client, NOTIFY_DOMAIN, translation_key)
data = await start_repair_fix_flow(http_client, DOMAIN, translation_key)

flow_id = data["flow_id"]
assert data["step_id"] == "confirm"
@@ -75,7 +72,7 @@ async def test_notify_migration_repair_flow(

# Assert the issue is no longer present
assert not issue_registry.async_get_issue(
domain=NOTIFY_DOMAIN,
domain=DOMAIN,
issue_id=translation_key,
)
assert len(issue_registry.issues) == 0

@@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock
from pyotgw.vars import OTGW_MODE_RESET

from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN
from homeassistant.components.opentherm_gw import DOMAIN
from homeassistant.components.opentherm_gw.const import OpenThermDeviceIdentifier
from homeassistant.const import ATTR_ENTITY_ID, CONF_ID
from homeassistant.core import HomeAssistant
@@ -33,7 +33,7 @@ async def test_cancel_room_setpoint_override_button(
assert (
button_entity_id := entity_registry.async_get_entity_id(
BUTTON_DOMAIN,
OPENTHERM_DOMAIN,
DOMAIN,
f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-cancel_room_setpoint_override",
)
) is not None
@@ -67,7 +67,7 @@ async def test_restart_button(
assert (
button_entity_id := entity_registry.async_get_entity_id(
BUTTON_DOMAIN,
OPENTHERM_DOMAIN,
DOMAIN,
f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-restart_button",
)
) is not None

@@ -15,7 +15,7 @@ from pyotgw.vars import (
)
import pytest

from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN
from homeassistant.components.opentherm_gw import DOMAIN
from homeassistant.components.opentherm_gw.const import (
DATA_GATEWAYS,
DATA_OPENTHERM_GW,
@@ -133,7 +133,7 @@ async def test_select_change_value(
assert (
select_entity_id := entity_registry.async_get_entity_id(
SELECT_DOMAIN,
OPENTHERM_DOMAIN,
DOMAIN,
f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}",
)
) is not None
@@ -203,7 +203,7 @@ async def test_select_state_update(
assert (
select_entity_id := entity_registry.async_get_entity_id(
SELECT_DOMAIN,
OPENTHERM_DOMAIN,
DOMAIN,
f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}",
)
) is not None

@@ -4,7 +4,7 @@ from unittest.mock import AsyncMock, MagicMock, call

import pytest

from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN
from homeassistant.components.opentherm_gw import DOMAIN
from homeassistant.components.opentherm_gw.const import OpenThermDeviceIdentifier
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,
@@ -44,7 +44,7 @@ async def test_switch_added_disabled(
assert (
switch_entity_id := entity_registry.async_get_entity_id(
SWITCH_DOMAIN,
OPENTHERM_DOMAIN,
DOMAIN,
f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}",
)
) is not None
@@ -80,7 +80,7 @@ async def test_ch_override_switch(
assert (
switch_entity_id := entity_registry.async_get_entity_id(
SWITCH_DOMAIN,
OPENTHERM_DOMAIN,
DOMAIN,
f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}",
)
) is not None

@@ -1,7 +1,7 @@
"""Pandora media player tests."""

from homeassistant.components.media_player import DOMAIN as PLATFORM_DOMAIN
from homeassistant.components.pandora import DOMAIN as PANDORA_DOMAIN
from homeassistant.components.pandora import DOMAIN
from homeassistant.const import CONF_PLATFORM
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.helpers import issue_registry as ir
@@ -19,7 +19,7 @@ async def test_repair_issue_is_created(
{
PLATFORM_DOMAIN: [
{
CONF_PLATFORM: PANDORA_DOMAIN,
CONF_PLATFORM: DOMAIN,
}
],
},
@@ -27,5 +27,5 @@ async def test_repair_issue_is_created(
await hass.async_block_till_done()
assert (
HOMEASSISTANT_DOMAIN,
f"deprecated_system_packages_yaml_integration_{PANDORA_DOMAIN}",
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
) in issue_registry.issues

@@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, patch
from pyprosegur.installation import Camera
import pytest

from homeassistant.components.prosegur import DOMAIN as PROSEGUR_DOMAIN
from homeassistant.components.prosegur import DOMAIN
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant

@@ -18,7 +18,7 @@ CONTRACT = "1234abcd"
def mock_config_entry() -> MockConfigEntry:
"""Return the default mocked config entry."""
return MockConfigEntry(
domain=PROSEGUR_DOMAIN,
domain=DOMAIN,
data={
"contract": CONTRACT,
CONF_USERNAME: "user@email.com",

@@ -8,7 +8,7 @@ from aiohttp.client_exceptions import ClientError
import pytest
from yarl import URL

from homeassistant.components.qwikswitch import DOMAIN as QWIKSWITCH
from homeassistant.components.qwikswitch import DOMAIN
from homeassistant.const import STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
@@ -66,7 +66,7 @@ async def test_binary_sensor_device(
aioclient_mock.get("http://127.0.0.1:2020/&device", json=qs_devices)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()

@@ -112,7 +112,7 @@ async def test_sensor_device(
aioclient_mock.get("http://127.0.0.1:2020/&device", json=qs_devices)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()

@@ -143,7 +143,7 @@ async def test_switch_device(
aioclient_mock.get("http://127.0.0.1:2020/&device", side_effect=get_devices_json)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()

@@ -207,7 +207,7 @@ async def test_light_device(
aioclient_mock.get("http://127.0.0.1:2020/&device", side_effect=get_devices_json)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()

@@ -281,7 +281,7 @@ async def test_button(
aioclient_mock.get("http://127.0.0.1:2020/&device", side_effect=get_devices_json)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()

@@ -306,7 +306,7 @@ async def test_failed_update_devices(
aioclient_mock.get("http://127.0.0.1:2020/&device", exc=ClientError())
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert not await async_setup_component(hass, QWIKSWITCH, config)
assert not await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()
listen_mock.stop()
@@ -329,7 +329,7 @@ async def test_single_invalid_sensor(
aioclient_mock.get("http://127.0.0.1:2020/&device", json=qs_devices)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()
await asyncio.sleep(0.01)
@@ -363,7 +363,7 @@ async def test_non_binary_sensor_with_binary_args(
aioclient_mock.get("http://127.0.0.1:2020/&device", json=qs_devices)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()
await asyncio.sleep(0.01)
@@ -385,7 +385,7 @@ async def test_non_relay_switch(
aioclient_mock.get("http://127.0.0.1:2020/&device", json=qs_devices)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()
await asyncio.sleep(0.01)
@@ -408,7 +408,7 @@ async def test_unknown_device(
aioclient_mock.get("http://127.0.0.1:2020/&device", json=qs_devices)
listen_mock = MockLongPollSideEffect()
aioclient_mock.get("http://127.0.0.1:2020/&listen", side_effect=listen_mock)
assert await async_setup_component(hass, QWIKSWITCH, config)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_start()
await hass.async_block_till_done()
await asyncio.sleep(0.01)

@@ -12,7 +12,7 @@ from sqlalchemy.exc import DatabaseError, OperationalError
from sqlalchemy.orm.session import Session
from voluptuous.error import MultipleInvalid

from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, Recorder
from homeassistant.components.recorder import DOMAIN, Recorder
from homeassistant.components.recorder.const import SupportedDialect
from homeassistant.components.recorder.db_schema import (
Events,
@@ -248,7 +248,7 @@ async def test_purge_old_states_encouters_database_corruption(
side_effect=sqlite3_exception,
),
):
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.services.async_call(DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.async_block_till_done()
await async_wait_recording_done(hass)

@@ -280,7 +280,7 @@ async def test_purge_old_states_encounters_temporary_mysql_error(
),
patch.object(recorder_mock.engine.dialect, "name", "mysql"),
):
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.services.async_call(DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
@@ -304,7 +304,7 @@ async def test_purge_old_states_encounters_operational_error(
"homeassistant.components.recorder.purge._purge_old_recorder_runs",
side_effect=exception,
):
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.services.async_call(DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
@@ -606,7 +606,7 @@ async def test_purge_edge_case(
)
assert events.count() == 1

await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await hass.async_block_till_done()

await async_recorder_block_till_done(hass)
@@ -897,7 +897,7 @@ async def test_purge_filtered_states(
assert events_keep.count() == 1

# Normal purge doesn't remove excluded entities
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await hass.async_block_till_done()

await async_recorder_block_till_done(hass)
@@ -913,7 +913,7 @@ async def test_purge_filtered_states(

# Test with 'apply_filter' = True
service_data["apply_filter"] = True
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await hass.async_block_till_done()

await async_recorder_block_till_done(hass)
@@ -961,7 +961,7 @@ async def test_purge_filtered_states(
assert session.query(StateAttributes).count() == 11

# Do it again to make sure nothing changes
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await async_recorder_block_till_done(hass)
await async_wait_purge_done(hass)

@@ -973,7 +973,7 @@ async def test_purge_filtered_states(
assert session.query(StateAttributes).count() == 11

service_data = {"keep_days": 0}
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await async_recorder_block_till_done(hass)
await async_wait_purge_done(hass)

@@ -1091,9 +1091,7 @@ async def test_purge_filtered_states_multiple_rounds(
)
assert events_keep.count() == 1

await hass.services.async_call(
RECORDER_DOMAIN, SERVICE_PURGE, service_data, blocking=True
)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data, blocking=True)

for _ in range(2):
# Make sure the second round of purging runs
@@ -1131,7 +1129,7 @@ async def test_purge_filtered_states_multiple_rounds(
assert session.query(StateAttributes).count() == 11

# Do it again to make sure nothing changes
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await async_recorder_block_till_done(hass)
await async_wait_purge_done(hass)

@@ -1188,7 +1186,7 @@ async def test_purge_filtered_states_to_empty(

# Test with 'apply_filter' = True
service_data["apply_filter"] = True
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await async_recorder_block_till_done(hass)
await async_wait_purge_done(hass)

@@ -1200,7 +1198,7 @@ async def test_purge_filtered_states_to_empty(

# Do it again to make sure nothing changes
# Why do we do this? Should we check the end result?
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await async_recorder_block_till_done(hass)
await async_wait_purge_done(hass)

@@ -1266,7 +1264,7 @@ async def test_purge_without_state_attributes_filtered_states_to_empty(

# Test with 'apply_filter' = True
service_data["apply_filter"] = True
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await async_recorder_block_till_done(hass)
await async_wait_purge_done(hass)

@@ -1278,7 +1276,7 @@ async def test_purge_without_state_attributes_filtered_states_to_empty(

# Do it again to make sure nothing changes
# Why do we do this? Should we check the end result?
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await async_recorder_block_till_done(hass)
await async_wait_purge_done(hass)

@@ -1334,7 +1332,7 @@ async def test_purge_filtered_events(
assert states.count() == 10

# Normal purge doesn't remove excluded events
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await hass.async_block_till_done()

await async_recorder_block_till_done(hass)
@@ -1350,7 +1348,7 @@ async def test_purge_filtered_events(

# Test with 'apply_filter' = True
service_data["apply_filter"] = True
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await hass.async_block_till_done()

await async_recorder_block_till_done(hass)
@@ -1479,7 +1477,7 @@ async def test_purge_filtered_events_state_changed(
assert events_purge.count() == 1
assert states.count() == 64

await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await hass.async_block_till_done()

for _ in range(4):
@@ -1525,9 +1523,7 @@ async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> N
"entity_globs": entity_globs,
}

await hass.services.async_call(
RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, service_data
)
await hass.services.async_call(DOMAIN, SERVICE_PURGE_ENTITIES, service_data)
await hass.async_block_till_done()

await async_recorder_block_till_done(hass)
@@ -2210,7 +2206,7 @@ async def test_purge_entities_keep_days(
assert len(states["sensor.purge"]) == 3

await hass.services.async_call(
RECORDER_DOMAIN,
DOMAIN,
SERVICE_PURGE_ENTITIES,
{
"entity_id": "sensor.purge",
@@ -2231,7 +2227,7 @@ async def test_purge_entities_keep_days(
assert len(states["sensor.purge"]) == 1

await hass.services.async_call(
RECORDER_DOMAIN,
DOMAIN,
SERVICE_PURGE_ENTITIES,
{
"entity_id": "sensor.purge",

@@ -12,11 +12,7 @@ from sqlalchemy import text, update
from sqlalchemy.exc import DatabaseError, OperationalError
from sqlalchemy.orm.session import Session

from homeassistant.components.recorder import (
DOMAIN as RECORDER_DOMAIN,
Recorder,
migration,
)
from homeassistant.components.recorder import DOMAIN, Recorder, migration
from homeassistant.components.recorder.const import SupportedDialect
from homeassistant.components.recorder.history import get_significant_states
from homeassistant.components.recorder.purge import purge_old_data
@@ -201,7 +197,7 @@ async def test_purge_old_states_encouters_database_corruption(
side_effect=sqlite3_exception,
),
):
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.services.async_call(DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.async_block_till_done()
await async_wait_recording_done(hass)

@@ -235,7 +231,7 @@ async def test_purge_old_states_encounters_temporary_mysql_error(
),
patch.object(recorder_mock.engine.dialect, "name", "mysql"),
):
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.services.async_call(DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
@@ -261,7 +257,7 @@ async def test_purge_old_states_encounters_operational_error(
"homeassistant.components.recorder.purge._purge_old_recorder_runs",
side_effect=exception,
):
await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.services.async_call(DOMAIN, SERVICE_PURGE, {"keep_days": 0})
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
@@ -549,7 +545,7 @@ async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None:
events = session.query(Events).filter(Events.event_type == "EVENT_TEST_PURGE")
assert events.count() == 1

await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
await hass.services.async_call(DOMAIN, SERVICE_PURGE, service_data)
await hass.async_block_till_done()

await async_recorder_block_till_done(hass)
@@ -1378,7 +1374,7 @@ async def test_purge_entities_keep_days(
assert len(states["sensor.purge"]) == 3

await hass.services.async_call(
RECORDER_DOMAIN,
DOMAIN,
SERVICE_PURGE_ENTITIES,
{
"entity_id": "sensor.purge",
@@ -1399,7 +1395,7 @@ async def test_purge_entities_keep_days(
assert len(states["sensor.purge"]) == 1

await hass.services.async_call(
RECORDER_DOMAIN,
DOMAIN,
SERVICE_PURGE_ENTITIES,
{
"entity_id": "sensor.purge",

@@ -6,7 +6,7 @@ import pytest
from reolink_aio.api import Chime
from reolink_aio.exceptions import InvalidParameterError, ReolinkError

from homeassistant.components.reolink.const import DOMAIN as REOLINK_DOMAIN
from homeassistant.components.reolink.const import DOMAIN
from homeassistant.components.reolink.services import ATTR_RINGTONE
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID, Platform
@@ -39,7 +39,7 @@ async def test_play_chime_service_entity(
# Test chime play service with device
test_chime.play = AsyncMock()
await hass.services.async_call(
REOLINK_DOMAIN,
DOMAIN,
"play_chime",
{ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"},
blocking=True,
@@ -49,7 +49,7 @@ async def test_play_chime_service_entity(
# Test errors
with pytest.raises(ServiceValidationError):
await hass.services.async_call(
REOLINK_DOMAIN,
DOMAIN,
"play_chime",
{ATTR_DEVICE_ID: ["invalid_id"], ATTR_RINGTONE: "attraction"},
blocking=True,
@@ -58,7 +58,7 @@ async def test_play_chime_service_entity(
test_chime.play = AsyncMock(side_effect=ReolinkError("Test error"))
with pytest.raises(HomeAssistantError):
await hass.services.async_call(
REOLINK_DOMAIN,
DOMAIN,
"play_chime",
{ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"},
blocking=True,
@@ -67,7 +67,7 @@ async def test_play_chime_service_entity(
test_chime.play = AsyncMock(side_effect=InvalidParameterError("Test error"))
with pytest.raises(ServiceValidationError):
await hass.services.async_call(
REOLINK_DOMAIN,
DOMAIN,
"play_chime",
{ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"},
blocking=True,
@@ -76,7 +76,7 @@ async def test_play_chime_service_entity(
reolink_connect.chime.return_value = None
with pytest.raises(ServiceValidationError):
await hass.services.async_call(
REOLINK_DOMAIN,
DOMAIN,
"play_chime",
{ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"},
blocking=True,
@@ -109,7 +109,7 @@ async def test_play_chime_service_unloaded(
# Test chime play service
with pytest.raises(ServiceValidationError):
await hass.services.async_call(
REOLINK_DOMAIN,
DOMAIN,
"play_chime",
{ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"},
blocking=True,

@@ -11,7 +11,7 @@ from homeassistant.components.rflink import (
CONF_RECONNECT_INTERVAL,
DATA_ENTITY_LOOKUP,
DEFAULT_TCP_KEEPALIVE_IDLE_TIMER,
DOMAIN as RFLINK_DOMAIN,
DOMAIN,
EVENT_KEY_COMMAND,
EVENT_KEY_SENSOR,
SERVICE_SEND_COMMAND,
@@ -425,9 +425,9 @@ async def test_keepalive(
) -> None:
"""Validate negative keepalive values."""
keepalive_value = -3
domain = RFLINK_DOMAIN
domain = DOMAIN
config = {
RFLINK_DOMAIN: {
DOMAIN: {
CONF_HOST: "10.10.0.1",
CONF_PORT: 1234,
CONF_KEEPALIVE_IDLE: keepalive_value,
@@ -455,9 +455,9 @@ async def test_keepalive_2(
) -> None:
"""Validate very short keepalive values."""
keepalive_value = 30
domain = RFLINK_DOMAIN
domain = DOMAIN
config = {
RFLINK_DOMAIN: {
DOMAIN: {
CONF_HOST: "10.10.0.1",
CONF_PORT: 1234,
CONF_KEEPALIVE_IDLE: keepalive_value,
@@ -484,10 +484,8 @@ async def test_keepalive_3(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Validate keepalive=0 value."""
domain = RFLINK_DOMAIN
config = {
RFLINK_DOMAIN: {CONF_HOST: "10.10.0.1", CONF_PORT: 1234, CONF_KEEPALIVE_IDLE: 0}
}
domain = DOMAIN
config = {DOMAIN: {CONF_HOST: "10.10.0.1", CONF_PORT: 1234, CONF_KEEPALIVE_IDLE: 0}}

# setup mocking rflink module
_, mock_create, _, _ = await mock_rflink(hass, config, domain, monkeypatch)
@@ -506,8 +504,8 @@ async def test_default_keepalive(
caplog: pytest.LogCaptureFixture,
) -> None:
"""Validate keepalive=0 value."""
domain = RFLINK_DOMAIN
config = {RFLINK_DOMAIN: {CONF_HOST: "10.10.0.1", CONF_PORT: 1234}}
domain = DOMAIN
config = {DOMAIN: {CONF_HOST: "10.10.0.1", CONF_PORT: 1234}}

# setup mocking rflink module
_, mock_create, _, _ = await mock_rflink(hass, config, domain, monkeypatch)
@@ -567,8 +565,8 @@ async def test_enable_debug_logs(
) -> None:
"""Test that changing debug level enables RFDEBUG."""

domain = RFLINK_DOMAIN
config = {RFLINK_DOMAIN: {CONF_HOST: "10.10.0.1", CONF_PORT: 1234}}
domain = DOMAIN
config = {DOMAIN: {CONF_HOST: "10.10.0.1", CONF_PORT: 1234}}

# setup mocking rflink module
_, mock_create, _, _ = await mock_rflink(hass, config, domain, monkeypatch)

@@ -15,7 +15,7 @@ from homeassistant.components.number import NumberDeviceClass
from homeassistant.components.sensor import (
DEVICE_CLASS_STATE_CLASSES,
DEVICE_CLASS_UNITS,
DOMAIN as SENSOR_DOMAIN,
DOMAIN,
NON_NUMERIC_DEVICE_CLASSES,
SensorDeviceClass,
SensorEntity,
@@ -2752,7 +2752,7 @@ async def test_name(hass: HomeAssistant) -> None:

mock_platform(
hass,
f"{TEST_DOMAIN}.{SENSOR_DOMAIN}",
f"{TEST_DOMAIN}.{DOMAIN}",
MockPlatform(async_setup_entry=async_setup_entry_platform),
)

@@ -24,7 +24,7 @@ async def test_repair_issue_is_created(
"""Test repair issue is created."""
from homeassistant.components.sms import ( # pylint: disable=import-outside-toplevel
DEPRECATED_ISSUE_ID,
DOMAIN as SMS_DOMAIN,
DOMAIN,
)

with (
@@ -33,7 +33,7 @@ async def test_repair_issue_is_created(
):
config_entry = MockConfigEntry(
title="test",
domain=SMS_DOMAIN,
domain=DOMAIN,
data={
CONF_DEVICE: "/dev/ttyUSB0",
},

Some files were not shown because too many files have changed in this diff.